@article{Chen_Qiu_Liu_Huang_2018,
  title        = {Meta Multi-Task Learning for Sequence Modeling},
  author       = {Chen, Junkun and Qiu, Xipeng and Liu, Pengfei and Huang, Xuanjing},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {32},
  number       = {1},
  year         = {2018},
  month        = {Apr.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/12007},
  DOI          = {10.1609/aaai.v32i1.12007},
  abstractNote = {Semantic composition functions have been playing a pivotal role in neural representation learning of text sequences. In spite of their success, most existing models suffer from the underfitting problem: they use the same shared compositional function on all the positions in the sequence, thereby lacking expressive power due to incapacity to capture the richness of compositionality. Besides, the composition functions of different tasks are independent and learned from scratch. In this paper, we propose a new sharing scheme of composition function across multiple tasks. Specifically, we use a shared meta-network to capture the meta-knowledge of semantic composition and generate the parameters of the task-specific semantic composition models. We conduct extensive experiments on two types of tasks, text classification and sequence tagging, which demonstrate the benefits of our approach. Besides, we show that the shared meta-knowledge learned by our proposed model can be regarded as off-the-shelf knowledge and easily transferred to new tasks.}
}