@article{Li_Chen_Xie_Yang_Yuan_Pu_Zhuang_2021,
  author       = {Li, Xianfeng and Chen, Weijie and Xie, Di and Yang, Shicai and Yuan, Peng and Pu, Shiliang and Zhuang, Yueting},
  title        = {A Free Lunch for Unsupervised Domain Adaptive Object Detection without Source Data},
  journal      = {Proceedings of the {AAAI} Conference on Artificial Intelligence},
  volume       = {35},
  number       = {10},
  pages        = {8474--8481},
  year         = {2021},
  month        = may,
  doi          = {10.1609/aaai.v35i10.17029},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/17029},
  abstractnote = {Unsupervised domain adaptation (UDA) assumes that source and target domain data are freely available and usually trained together to reduce the domain gap. However, considering the data privacy and the inefficiency of data transmission, it is impractical in real scenarios. Hence, it draws our eyes to optimize the network in the target domain without accessing labeled source data. To explore this direction in object detection, for the first time, we propose a source data-free domain adaptive object detection (SFOD) framework via modeling it into a problem of learning with noisy labels. Generally, a straightforward method is to leverage the pre-trained network from the source domain to generate the pseudo labels for target domain optimization. However, it is difficult to evaluate the quality of pseudo labels since no labels are available in target domain. In this paper, self-entropy descent (SED) is a metric proposed to search an appropriate confidence threshold for reliable pseudo label generation without using any handcrafted labels. Nonetheless, completely clean labels are still unattainable. After a thorough experimental analysis, false negatives are found to dominate in the generated noisy labels. Undoubtedly, false negatives mining is helpful for performance improvement, and we ease it to false negatives simulation through data augmentation like Mosaic. Extensive experiments conducted in four representative adaptation tasks have demonstrated that the proposed framework can easily achieve state-of-the-art performance. From another view, it also reminds the UDA community that the labeled source data are not fully exploited in the existing methods.},
}