@article{Zhang_Wang_2020, title={Rception: Wide and Deep Interaction Networks for Machine Reading Comprehension (Student Abstract)}, volume={34}, url={https://ojs.aaai.org/index.php/AAAI/article/view/7266}, DOI={10.1609/aaai.v34i10.7266}, abstractNote={Most models for machine reading comprehension (MRC) focus on recurrent neural networks (RNNs) and attention mechanisms, though convolutional neural networks (CNNs) are also used for time efficiency. However, little attention has been paid to leveraging CNNs and RNNs together in MRC. For deeper understanding, humans sometimes need local information for short phrases and sometimes need global context for long passages. In this paper, we propose a novel architecture, Rception, to capture and leverage both local deep information and global wide context. It fuses different kinds of networks and hyper-parameters horizontally rather than simply stacking them layer by layer vertically. Experiments on the Stanford Question Answering Dataset (SQuAD) show that our proposed architecture achieves good performance.}, number={10}, journal={Proceedings of the AAAI Conference on Artificial Intelligence}, author={Zhang, Xuanyu and Wang, Zhichun}, year={2020}, month={Apr.}, pages={13987-13988} }