@article{Liu_Wang_Wang_Liang_Chen_Jiang_Xiao_Chen_2021,
  title        = {Learning Term Embeddings for Lexical Taxonomies},
  author       = {Liu, Jingping and Wang, Menghui and Wang, Chao and Liang, Jiaqing and Chen, Lihan and Jiang, Haiyun and Xiao, Yanghua and Chen, Yunwen},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {35},
  number       = {7},
  pages        = {6410--6417},
  year         = {2021},
  month        = may,
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/16795},
  doi          = {10.1609/aaai.v35i7.16795},
  abstractNote = {Lexical taxonomies, a special kind of knowledge graph, are essential for natural language understanding. This paper studies the problem of lexical taxonomy embedding. Most existing graph embedding methods are difficult to apply to lexical taxonomies because (1) they ignore implicit but important information, namely sibling relations, which are not explicitly stated in lexical taxonomies, and (2) lexical taxonomies contain many polysemous terms. In this paper, we propose a novel method for lexical taxonomy embedding. This method optimizes an objective function that models both hyponym-hypernym relations and sibling relations. A term-level attention mechanism and a random-walk-based metric are then proposed to assist the modeling of these two kinds of relations, respectively. Finally, a novel training method based on curriculum learning is proposed. We conduct extensive experiments on two tasks to show that our approach outperforms other embedding methods, and we use the learned term embeddings to improve the performance of state-of-the-art BERT- and RoBERTa-based models on text classification.}
}