@article{Grinberg_Aboutalebi_Lyman-Abramovitch_Balle_Precup_2018,
  title        = {Learning Predictive State Representations From Non-Uniform Sampling},
  author       = {Grinberg, Yuri and Aboutalebi, Hossein and Lyman-Abramovitch, Melanie and Balle, Borja and Precup, Doina},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {32},
  number       = {1},
  year         = {2018},
  month        = {Apr.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/11744},
  doi          = {10.1609/aaai.v32i1.11744},
  abstractNote = {Predictive state representations (PSR) have emerged as a powerful method for modelling partially observable environments. PSR learning algorithms can build models for predicting all observable variables, or predicting only some of them conditioned on others (e.g., actions or exogenous variables). In the latter case, which we call conditional modelling, the accuracy of different estimates of the conditional probabilities for a fixed dataset can vary significantly, due to the limited sampling of certain conditions. This can have negative consequences on the PSR parameter estimation process, which are not taken into account by the current state-of-the-art PSR spectral learning algorithms. In this paper, we examine closely conditional modelling within the PSR framework. We first establish a new positive but surprisingly non-trivial result: a conditional model can never be larger than the complete model. Then, we address the core shortcoming of existing PSR spectral learning methods for conditional models by incorporating an additional step in the process, which can be seen as a type of matrix denoising. We further refine this objective by adding penalty terms for violations of the system dynamics matrix structure, which improves the PSR predictive performance. Empirical evaluations on both synthetic and real datasets highlight the advantages of the proposed approach.}
}