@comment{NOTE(review): citation key contains non-ASCII "á" — works with Biber but not
classic BibTeX; kept unchanged so existing \cite{Kabán_2019} calls still resolve.}
@article{Kabán_2019,
  author   = {Kab{\'a}n, Ata},
  title    = {Dimension-Free Error Bounds from Random Projections},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {33},
  number   = {01},
  pages    = {4049--4056},
  year     = {2019},
  month    = jul,
  doi      = {10.1609/aaai.v33i01.33014049},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/4300},
  abstract = {Learning from high dimensional data is challenging in general -- however, often the data is not truly high dimensional in the sense that it may have some hidden low complexity geometry. We give new, user-friendly PAC-bounds that are able to take advantage of such benign geometry to reduce dimensional-dependence of error-guarantees in settings where such dependence is known to be essential in general. This is achieved by employing random projection as an analytic tool, and exploiting its structure-preserving compression ability. We introduce an auxiliary function class that operates on reduced dimensional inputs, and a new complexity term, as the distortion of the loss under random projections. The latter is a hypothesis-dependent data-complexity, whose analytic estimates turn out to recover various regularisation schemes in parametric models, and a notion of intrinsic dimension, as quantified by the Gaussian width of the input support in the case of the nearest neighbour rule. If there is benign geometry present, then the bounds become tighter, otherwise they recover the original dimension-dependent bounds.},
}