@article{Ren_Shi_Li_2020,
  title        = {Distill BERT to Traditional Models in Chinese Machine Reading Comprehension (Student Abstract)},
  author       = {Ren, Xingkai and Shi, Ronghua and Li, Fangfang},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {34},
  number       = {10},
  pages        = {13901--13902},
  year         = {2020},
  month        = apr,
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/7223},
  doi          = {10.1609/aaai.v34i10.7223},
  abstractNote = {Recently, unsupervised representation learning has been extremely successful in natural language processing. More and more pre-trained language models have been proposed and have achieved state-of-the-art results, especially in machine reading comprehension. However, these pre-trained language models are huge, with hundreds of millions of parameters to train, which makes them time-consuming to deploy in industry. We therefore propose a method that distills a pre-trained language model into a traditional reading comprehension model, so that the distilled model achieves faster inference speed and higher accuracy in machine reading comprehension. We evaluate the proposed method on the Chinese machine reading comprehension dataset CMRC2018 and greatly improve the accuracy of the original model. To the best of our knowledge, we are the first to propose distilling a pre-trained language model for Chinese machine reading comprehension.}
}