@article{Chen_Li_Chen_Zhou_Liu_2022,
  author   = {Chen, Junying and Li, Dongfang and Chen, Qingcai and Zhou, Wenxiu and Liu, Xin},
  title    = {{Diaformer}: Automatic Diagnosis via Symptoms Sequence Generation},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {36},
  number   = {4},
  pages    = {4432--4440},
  year     = {2022},
  month    = jun,
  doi      = {10.1609/aaai.v36i4.20365},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/20365},
  abstract = {Automatic diagnosis has attracted increasing attention but remains challenging due to multi-step reasoning. Recent works usually address it by reinforcement learning methods. However, these methods show low efficiency and require task-specific reward functions. Considering the conversation between doctor and patient allows doctors to probe for symptoms and make diagnoses, the diagnosis process can be naturally seen as the generation of a sequence including symptoms and diagnoses. Inspired by this, we reformulate automatic diagnosis as a symptoms Sequence Generation (SG) task and propose a simple but effective automatic Diagnosis model based on Transformer (Diaformer). We firstly design the symptom attention framework to learn the generation of symptom inquiry and the disease diagnosis. To alleviate the discrepancy between sequential generation and disorder of implicit symptoms, we further design three orderless training mechanisms. Experiments on three public datasets show that our model outperforms baselines on disease diagnosis by 1%, 6% and 11.5% with the highest training efficiency. Detailed analysis on symptom inquiry prediction demonstrates that the potential of applying symptoms sequence generation for automatic diagnosis.},
}