@article{Jia_Cao_Shi_Fang_Yin_Wang_2021,
  title        = {Flexible Non-Autoregressive Extractive Summarization with Threshold: How to Extract a Non-Fixed Number of Summary Sentences},
  author       = {Jia, Ruipeng and Cao, Yanan and Shi, Haichao and Fang, Fang and Yin, Pengfei and Wang, Shi},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {35},
  number       = {14},
  pages        = {13134--13142},
  year         = {2021},
  month        = {May},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/17552},
  doi          = {10.1609/aaai.v35i14.17552},
  abstractNote = {Sentence-level extractive summarization is a fundamental yet challenging task, and recent powerful approaches prefer to pick sentences sorted by the predicted probabilities until the length limit is reached, a.k.a. the ``Top-K Strategy''. This length limit is fixed based on the validation set, resulting in a lack of flexibility. In this work, we propose a more flexible and accurate non-autoregressive method for single-document extractive summarization that extracts a non-fixed number of summary sentences without the sorting step. We call our approach ThresSum, as it picks sentences simultaneously and individually from the source document whenever the predicted probabilities exceed a threshold. During training, the model enhances sentence representations through iterative refinement, and the intermediate latent variables receive weak supervision from soft labels, which are generated progressively by adjusting the temperature with a knowledge distillation algorithm. Specifically, the temperature is initialized at a high value and decreases over iterations until it reaches 1. Experimental results on the CNN/DM and NYT datasets demonstrate the effectiveness of ThresSum, which significantly outperforms BERTSUMEXT with a substantial improvement of 0.74 ROUGE-1 on CNN/DM. Our source code will be available on GitHub.}
}