@article{Wei_Hu_Xing_Wang_Gao_2019,
  title={Translating with Bilingual Topic Knowledge for Neural Machine Translation},
  volume={33},
  url={https://ojs.aaai.org/index.php/AAAI/article/view/4711},
  DOI={10.1609/aaai.v33i01.33017257},
  abstractNote={The dominant neural machine translation (NMT) models, which are based on the encoder-decoder architecture, have recently achieved state-of-the-art performance. Traditionally, NMT models depend only on the representations learned during training for mapping a source sentence into the target domain. However, the learned representations often suffer from implicit and inadequately informed properties. In this paper, we propose a novel bilingual topic enhanced NMT (BLT-NMT) model to improve translation performance by incorporating bilingual topic knowledge into NMT. Specifically, the bilingual topic knowledge is incorporated into the hidden states of both the encoder and the decoder, as well as into the attention mechanism. With this new setting, the proposed BLT-NMT has access to the background knowledge implied in bilingual topics, which goes beyond the sequential context, and enables the attention mechanism to attend at the topic level for generating accurate target words during translation. Experimental results show that the proposed model consistently outperforms the traditional RNNsearch and the previous topic-informed NMT on Chinese-English and English-German translation tasks. We also introduce the bilingual topic knowledge into the newly emerged Transformer base model on English-German translation and achieve a notable improvement.},
  number={01},
  journal={Proceedings of the AAAI Conference on Artificial Intelligence},
  author={Wei, Xiangpeng and Hu, Yue and Xing, Luxi and Wang, Yipeng and Gao, Li},
  year={2019},
  month={Jul.},
  pages={7257-7264}
}