@article{Costen_Rigter_Lacerda_Hawes_2023,
  author   = {Costen, Clarissa and Rigter, Marc and Lacerda, Bruno and Hawes, Nick},
  title    = {Planning with Hidden Parameter Polynomial {MDPs}},
  journal  = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  volume   = {37},
  number   = {10},
  pages    = {11963--11971},
  year     = {2023},
  month    = jun,
  doi      = {10.1609/aaai.v37i10.26411},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/26411},
  abstract = {For many applications of Markov Decision Processes (MDPs), the transition function cannot be specified exactly. Bayes-Adaptive MDPs (BAMDPs) extend MDPs to consider transition probabilities governed by latent parameters. To act optimally in BAMDPs, one must maintain a belief distribution over the latent parameters. Typically, this distribution is described by a set of sample (particle) MDPs, and associated weights which represent the likelihood of a sample MDP being the true underlying MDP. However, as the number of dimensions of the latent parameter space increases, the number of sample MDPs required to sufficiently represent the belief distribution grows exponentially. Thus, maintaining an accurate belief in the form of a set of sample MDPs over complex latent spaces is computationally intensive, which in turn affects the performance of planning for these models. In this paper, we propose an alternative approach for maintaining the belief over the latent parameters. We consider a class of BAMDPs where the transition probabilities can be expressed in closed form as a polynomial of the latent parameters, and outline a method to maintain a closed-form belief distribution for the latent parameters which results in an accurate belief representation. Furthermore, the closed-form representation does away with the need to tune the number of sample MDPs required to represent the belief. We evaluate two domains and empirically show that the polynomial, closed-form, belief representation results in better plans than a sampling-based belief representation.},
}