@article{Yan_Han_He_Sun_2020,
  title        = {End-to-End Bootstrapping Neural Network for Entity Set Expansion},
  author       = {Yan, Lingyong and Han, Xianpei and He, Ben and Sun, Le},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {34},
  number       = {05},
  pages        = {9402--9409},
  year         = {2020},
  month        = {Apr.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/6482},
  DOI          = {10.1609/aaai.v34i05.6482},
  abstractNote = {Bootstrapping for entity set expansion (ESE) has long been modeled as a multi-step pipelined process. Such a paradigm, unfortunately, often suffers from two main challenges: 1) the entities are expanded in multiple separate steps, which tends to introduce noisy entities and results in the semantic drift problem; 2) it is hard to exploit the high-order entity-pattern relations for entity set expansion. In this paper, we propose an end-to-end bootstrapping neural network for entity set expansion, named BootstrapNet, which models the bootstrapping in an encoder-decoder architecture. In the encoding stage, a graph attention network is used to capture both the first- and the high-order relations between entities and patterns, and encode useful information into their representations. In the decoding stage, the entities are sequentially expanded through a recurrent neural network, which outputs entities at each stage, and its hidden state vectors, representing the target category, are updated at each expansion step. Experimental results demonstrate substantial improvement of our model over previous ESE approaches.}
}