@article{Liang_Guo_Chang_Chen_2018,
  author   = {Liang, Kongming and Guo, Yuhong and Chang, Hong and Chen, Xilin},
  title    = {Visual Relationship Detection With {Deep Structural Ranking}},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {32},
  number   = {1},
  year     = {2018},
  month    = apr,
  doi      = {10.1609/aaai.v32i1.12274},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/12274},
  abstract = {Visual relationship detection aims to describe the interactions between pairs of objects. Different from individual object learning tasks, the number of possible relationships are much larger, which makes it hard to explore only based on the visual appearance of objects. In addition, due to the limited human effort, the annotations for visual relationships are usually incomplete which increases the difficulty of model training and evaluation. In this paper, we propose a novel framework, called Deep Structural Ranking, for visual relationship detection. To complement the representation ability of visual appearance, we integrate multiple cues for predicting the relationships contained in an input image. Moreover, we design a new ranking objective function by enforcing the annotated relationships to have higher relevance scores. Unlike previous works, our proposed method can both facilitate the co-occurrence of relationships and mitigate the incompleteness problem. Experimental results show that our proposed method outperforms the state-of-the-art on the two widely used datasets. We also demonstrate its superiority in detecting zero-shot relationships.},
}