% AAAI 2020 paper (PML-LD). Cleaned from OJS auto-export: page range uses --,
% month uses the standard macro, HTML in the exported abstractNote converted
% to LaTeX and moved to the standard `abstract` field. Citation key unchanged.
@article{Xu_Liu_Geng_2020,
  author   = {Xu, Ning and Liu, Yun-Peng and Geng, Xin},
  title    = {Partial Multi-Label Learning with Label Distribution},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {34},
  number   = {04},
  pages    = {6510--6517},
  year     = {2020},
  month    = apr,
  doi      = {10.1609/aaai.v34i04.6124},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/6124},
  abstract = {Partial multi-label learning (PML) aims to learn from training examples each associated with a set of candidate labels, among which only a subset are valid for the training example. The common strategy to induce predictive model is trying to disambiguate the candidate label set, such as identifying the ground-truth label via utilizing the confidence of each candidate label or estimating the noisy labels in the candidate label sets. Nonetheless, these strategies ignore considering the essential \emph{label distribution} corresponding to each instance since the label distribution is not explicitly available in the training set. In this paper, a new partial multi-label learning strategy named P\textsc{ml-ld} is proposed to learn from partial multi-label examples via \emph{label enhancement}. Specifically, label distributions are recovered by leveraging the topological information of the feature space and the correlations among the labels. After that, a multi-class predictive model is learned by fitting a regularized multi-output regressor with the recovered label distributions. Experimental results on synthetic as well as real-world datasets clearly validate the effectiveness of P\textsc{ml-ld} for solving PML problems.},
}