@article{Bjorck_Kabra_Weinberger_Gomes_2021,
  title        = {Characterizing the Loss Landscape in Non-Negative Matrix Factorization},
  author       = {Bjorck, Johan and Kabra, Anmol and Weinberger, Kilian Q. and Gomes, Carla},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {35},
  number       = {8},
  pages        = {6768--6776},
  year         = {2021},
  month        = {May},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/16836},
  DOI          = {10.1609/aaai.v35i8.16836},
  abstractNote = {Non-negative matrix factorization (NMF) is a highly celebrated algorithm for matrix decomposition that guarantees non-negative factors. The underlying optimization problem is computationally intractable, yet in practice, gradient-descent-based methods often find good solutions. In this paper, we revisit the NMF optimization problem and analyze its loss landscape in non-worst-case settings. It has recently been observed that gradients in deep networks tend to point towards the final minimizer throughout the optimization procedure. We show that a similar property holds (with high probability) for NMF, provably in a non-worst-case model with a planted solution, and empirically across an extensive suite of real-world NMF problems. Our analysis predicts that this property becomes more likely with a growing number of parameters, and experiments suggest that a similar trend might also hold for deep neural networks---turning increasing dataset sizes and model sizes into a blessing from an optimization perspective.}
}