@article{Shao_Gong_Qi_Cao_Ji_Lin_2020,
  author   = {Shao, Bo and Gong, Yeyun and Qi, Weizhen and Cao, Guihong and Ji, Jianshu and Lin, Xiaola},
  title    = {Graph-Based {Transformer} with Cross-Candidate Verification for Semantic Parsing},
  journal  = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  volume   = {34},
  number   = {05},
  pages    = {8807--8814},
  year     = {2020},
  month    = apr,
  doi      = {10.1609/aaai.v34i05.6408},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/6408},
  abstract = {In this paper, we present a graph-based Transformer for semantic parsing. We separate the semantic parsing task into two steps: 1) Use a sequence-to-sequence model to generate the logical form candidates. 2) Design a graph-based Transformer to rerank the candidates. To handle the structure of logical forms, we incorporate graph information to Transformer, and design a cross-candidate verification mechanism to consider all the candidates in the ranking process. Furthermore, we integrate BERT into our model and jointly train the graph-based Transformer and BERT. We conduct experiments on 3 semantic parsing benchmarks, ATIS, JOBS and Task Oriented semantic Parsing dataset (TOP). Experiments show that our graph-based reranking model achieves results comparable to state-of-the-art models on the ATIS and JOBS datasets. And on the TOP dataset, our model achieves a new state-of-the-art result.},
}