@article{Liang_Zhang_Zhou_Li_Hu_2022,
  title        = {One More Check: Making ``Fake Background'' Be Tracked Again},
  volume       = {36},
  number       = {2},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  author       = {Liang, Chao and Zhang, Zhipeng and Zhou, Xue and Li, Bing and Hu, Weiming},
  year         = {2022},
  month        = {Jun.},
  pages        = {1546--1554},
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/20045},
  DOI          = {10.1609/aaai.v36i2.20045},
  abstractNote = {The one-shot multi-object tracking, which integrates object detection and ID embedding extraction into a unified network, has achieved groundbreaking results in recent years. However, current one-shot trackers solely rely on single-frame detections to predict candidate bounding boxes, which may be unreliable when facing disastrous visual degradation, e.g., motion blur, occlusions. Once a target bounding box is mistakenly classified as background by the detector, the temporal consistency of its corresponding tracklet will be no longer maintained. In this paper, we set out to restore the bounding boxes misclassified as ``fake background'' by proposing a re-check network. The re-check network innovatively expands the role of ID embedding from data association to motion forecasting by effectively propagating previous tracklets to the current frame with a small overhead. Note that the propagation results are yielded by an independent and efficient embedding search, preventing the model from over-relying on detection results. Eventually, it helps to reload the ``fake background'' and repair the broken tracklets. Building on a strong baseline CSTrack, we construct a new one-shot tracker and achieve favorable gains by 70.7 → 76.4, 70.6 → 76.3 MOTA on MOT16 and MOT17, respectively. It also reaches a new state-of-the-art MOTA and IDF1 performance. Code is released at https://github.com/JudasDie/SOTS.}
}