@article{Chen_Gong_Xu_Wang_Zhang_Du_2020,
  author   = {Chen, Ziye and Gong, Mingming and Xu, Yanwu and Wang, Chaohui and Zhang, Kun and Du, Bo},
  title    = {Compressed Self-Attention for Deep Metric Learning},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {34},
  number   = {04},
  pages    = {3561--3568},
  year     = {2020},
  month    = apr,
  doi      = {10.1609/aaai.v34i04.5762},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/5762},
  abstract = {In this paper, we aim to enhance self-attention (SA) mechanism for deep metric learning in visual perception, by capturing richer contextual dependencies in visual data. To this end, we propose a novel module, named \emph{compressed self-attention (CSA)}, which significantly reduces the computation and memory cost with a neglectable decrease in accuracy with respect to the original SA mechanism, thanks to the following two characteristics: i) it only needs to compute a small number of base attention maps for a small number of base feature vectors; and ii) the output at each spatial location can be simply obtained by an adaptive weighted average of the outputs calculated from the base attention maps. The high computational efficiency of CSA enables the application to high-resolution shallow layers in convolutional neural networks with little additional cost. In addition, CSA makes it practical to further partition the feature maps into groups along the channel dimension and compute attention maps for features in each group separately, thus increasing the diversity of long-range dependencies and accordingly boosting the accuracy. We evaluate the performance of CSA via extensive experiments on two metric learning tasks: person re-identification and local descriptor learning. Qualitative and quantitative comparisons with latest methods demonstrate the significance of CSA in this topic.},
}