@article{Eshratifar_Abrishami_Eigen_Pedram_2019,
  title        = {A Meta-Learning Approach for Custom Model Training},
  author       = {Eshratifar, Amir Erfan and Abrishami, Mohammad Saeed and Eigen, David and Pedram, Massoud},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {33},
  number       = {01},
  pages        = {9937--9938},
  year         = {2019},
  month        = {Jul.},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/5105},
  DOI          = {10.1609/aaai.v33i01.33019937},
  abstractNote = {Transfer-learning and meta-learning are two effective methods to apply knowledge learned from large data sources to new tasks. In few-class, few-shot target task settings (i.e., when there are only a few classes and training examples available in the target task), meta-learning approaches that optimize for future task learning have outperformed the typical transfer approach of initializing model weights from a pretrained starting point. But as we experimentally show, meta-learning algorithms that work well in the few-class setting do not generalize well in many-shot and many-class cases. In this paper, we propose a joint training approach that combines both transfer-learning and meta-learning. Benefiting from the advantages of each, our method obtains improved generalization performance on unseen target tasks in both few- and many-class and few- and many-shot scenarios.}
}