@article{Yang_She_Lai_Yang_2018,
  title        = {Retrieving and Classifying Affective Images via Deep Metric Learning},
  author       = {Yang, Jufeng and She, Dongyu and Lai, Yu-Kun and Yang, Ming-Hsuan},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {32},
  number       = {1},
  year         = {2018},
  month        = {Apr.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/11275},
  doi          = {10.1609/aaai.v32i1.11275},
  abstractNote = {Affective image understanding has been extensively studied in the last decade since more and more users express emotion via visual contents. While current algorithms based on convolutional neural networks aim to distinguish emotional categories in a discrete label space, the task is inherently ambiguous. This is mainly because emotional labels with the same polarity (i.e., positive or negative) are highly related, which is different from concrete object concepts such as cat, dog and bird. To the best of our knowledge, few methods focus on leveraging such a characteristic of emotions for affective image understanding. In this work, we address the problem of understanding affective images via deep metric learning and propose a multi-task deep framework to optimize both retrieval and classification goals. We propose sentiment constraints adapted from the triplet constraints, which are able to explore the hierarchical relation of emotion labels. We further exploit the sentiment vector as an effective representation to distinguish affective images, utilizing the texture representation derived from convolutional layers. Extensive evaluations on four widely-used affective datasets, i.e., Flickr and Instagram, IAPSa, Art Photo, and Abstract Paintings, demonstrate that the proposed algorithm performs favorably against the state-of-the-art methods on both affective image retrieval and classification tasks.}
}