@article{Wang_Hua_Kodirov_Hu_Robertson_2019,
  title        = {Deep Metric Learning by Online Soft Mining and Class-Aware Attention},
  author       = {Wang, Xinshao and Hua, Yang and Kodirov, Elyor and Hu, Guosheng and Robertson, Neil M.},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {33},
  number       = {01},
  pages        = {5361--5368},
  year         = {2019},
  month        = {Jul.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/4474},
  doi          = {10.1609/aaai.v33i01.33015361},
  abstractNote = {Deep metric learning aims to learn a deep embedding that can capture the semantic similarity of data points. Given the availability of massive training samples, deep metric learning is known to suffer from slow convergence due to a large fraction of trivial samples. Therefore, most existing methods generally resort to sample mining strategies for selecting nontrivial samples to accelerate convergence and improve performance. In this work, we identify two critical limitations of the sample mining methods and provide solutions for both of them. First, previous mining methods assign one binary score to each sample, i.e., dropping or keeping it, so they only select a subset of relevant samples in a mini-batch. Therefore, we propose a novel sample mining method, called Online Soft Mining (OSM), which assigns one continuous score to each sample to make use of all samples in the mini-batch. OSM learns extended manifolds that preserve useful intraclass variances by focusing on more similar positives. Second, the existing methods are easily influenced by outliers as they are generally included in the mined subset. To address this, we introduce Class-Aware Attention (CAA), which assigns little attention to abnormal data samples. Furthermore, by combining OSM and CAA, we propose a novel weighted contrastive loss to learn discriminative embeddings. Extensive experiments on two fine-grained visual categorisation datasets and two video-based person re-identification benchmarks show that our method significantly outperforms the state-of-the-art.}
}