@article{Futami_Cui_Sato_Sugiyama_2019,
  author   = {Futami, Futoshi and Cui, Zhenghang and Sato, Issei and Sugiyama, Masashi},
  title    = {{Bayesian} Posterior Approximation via Greedy Particle Optimization},
  journal  = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  volume   = {33},
  number   = {01},
  pages    = {3606--3613},
  year     = {2019},
  month    = jul,
  doi      = {10.1609/aaai.v33i01.33013606},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/4241},
  abstract = {In Bayesian inference, the posterior distributions are difficult to obtain analytically for complex models such as neural networks. Variational inference usually uses a parametric distribution for approximation, from which we can easily draw samples. Recently discrete approximation by particles has attracted attention because of its high expression ability. An example is Stein variational gradient descent (SVGD), which iteratively optimizes particles. Although SVGD has been shown to be computationally efficient empirically, its theoretical properties have not been clarified yet and no finite sample bound of the convergence rate is known. Another example is the Stein points (SP) method, which minimizes kernelized Stein discrepancy directly. Although a finite sample bound is assured theoretically, SP is computationally inefficient empirically, especially in high-dimensional problems. In this paper, we propose a novel method named \emph{maximum mean discrepancy minimization by the Frank-Wolfe algorithm (MMD-FW)}, which minimizes MMD in a greedy way by the FW algorithm. Our method is computationally efficient empirically and we show that its finite sample convergence bound is in a linear order in finite dimensions.},
}