@article{Hu_Wang_Cheng_2018,
  title        = {From Hashing to CNNs: Training Binary Weight Networks via Hashing},
  author       = {Hu, Qinghao and Wang, Peisong and Cheng, Jian},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {32},
  number       = {1},
  year         = {2018},
  month        = apr,
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/11660},
  doi          = {10.1609/aaai.v32i1.11660},
  abstractNote = {Deep convolutional neural networks (CNNs) have shown appealing performance on various computer vision tasks in recent years. This motivates people to deploy CNNs in real-world applications. However, most state-of-the-art CNNs require large memory and computational resources, which hinders their deployment on mobile devices. Recent studies show that low-bit weight representation can greatly reduce storage and memory demand and also enable efficient network inference. To achieve this goal, we propose a novel approach named BWNH to train Binary Weight Networks via Hashing. In this paper, we first reveal the strong connection between inner-product preserving hashing and binary weight networks, and show that training binary weight networks can be intrinsically regarded as a hashing problem. Based on this perspective, we propose an alternating optimization method to learn the hash codes instead of directly learning binary weights. Extensive experiments on CIFAR10, CIFAR100 and ImageNet demonstrate that our proposed BWNH outperforms the current state-of-the-art by a large margin.}
}