@article{Chen_Wang_Shu_Wen_Xu_Shi_Xu_Xu_2020,
  author   = {Chen, Hanting and Wang, Yunhe and Shu, Han and Wen, Changyuan and Xu, Chunjing and Shi, Boxin and Xu, Chao and Xu, Chang},
  title    = {Distilling Portable Generative Adversarial Networks for Image Translation},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {34},
  number   = {4},
  pages    = {3585--3592},
  year     = {2020},
  month    = apr,
  doi      = {10.1609/aaai.v34i04.5765},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/5765},
  abstract = {Despite Generative Adversarial Networks (GANs) have been widely used in various image-to-image translation tasks, they can be hardly applied on mobile devices due to their heavy computation and storage cost. Traditional network compression methods focus on visually recognition tasks, but never deal with generation tasks. Inspired by knowledge distillation, a student generator of fewer parameters is trained by inheriting the low-level and high-level information from the original heavy teacher generator. To promote the capability of student generator, we include a student discriminator to measure the distances between real images, and images generated by student and teacher generators. An adversarial learning process is therefore established to optimize student generator and student discriminator. Qualitative and quantitative analysis by conducting experiments on benchmark datasets demonstrate that the proposed method can learn portable generative models with strong performance.},
}