@article{Liu_Liu_Wang_Zhou_Yang_2021,
  title        = {Hierarchical Multiple Kernel Clustering},
  author       = {Liu, Jiyuan and Liu, Xinwang and Wang, Siwei and Zhou, Sihang and Yang, Yuexiang},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {35},
  number       = {10},
  pages        = {8671--8679},
  year         = {2021},
  month        = {May},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/17051},
  doi          = {10.1609/aaai.v35i10.17051},
  abstractNote = {Current multiple kernel clustering algorithms compute a partition with the consensus kernel or graph learned from the pre-specified ones, while the emerging late fusion methods first construct multiple partitions from each kernel separately and then obtain a consensus one from them. However, both approaches directly distill the clustering information from kernels or graphs into partition matrices, where the sudden dimension drop results in a loss of details advantageous for clustering. In this paper, we provide a brief insight into the aforementioned issue and propose a hierarchical approach that performs clustering while maximally preserving advantageous details. Specifically, we gradually group samples into fewer clusters, generating a sequence of intermediary matrices of descending sizes. The consensus partition is simultaneously learned and in turn guides the construction of the intermediary matrices. This cyclic process is modeled as a unified objective, and an alternate algorithm is designed to solve it. In addition, the proposed method is validated and compared with other representative multiple kernel clustering algorithms on benchmark datasets, demonstrating state-of-the-art performance by a large margin.}
}