@article{Liu_Zhao_Chen_Wang_Jiang_2022,
  title        = {SpikeConverter: An Efficient Conversion Framework Zipping the Gap between Artificial Neural Networks and Spiking Neural Networks},
  author       = {Liu, Fangxin and Zhao, Wenbo and Chen, Yongbiao and Wang, Zongwu and Jiang, Li},
  journal      = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume       = {36},
  number       = {2},
  pages        = {1692--1701},
  year         = {2022},
  month        = jun,
  url          = {https://ojs.aaai.org/index.php/AAAI/article/view/20061},
  doi          = {10.1609/aaai.v36i2.20061},
  abstract     = {Spiking Neural Networks (SNNs) have recently attracted enormous research interest since their event-driven and brain-inspired structure enables low-power computation. In image recognition tasks, the best results are achieved by SNN so far utilizing ANN-SNN conversion methods that replace activation functions in artificial neural networks~(ANNs) with integrate-and-fire neurons. Compared to source ANNs, converted SNNs usually suffer from accuracy loss and require a considerable number of time steps to achieve competitive accuracy. We find that the performance degradation of converted SNN stems from the fact that the information capacity of spike trains in transferred networks is smaller than that of activation values in source ANN, resulting in less information being passed during SNN inference. To better correlate ANN and SNN for better performance, we propose a conversion framework to mitigate the gap between the activation value of source ANN and the generated spike train of target SNN. The conversion framework originates from exploring an identical relation in the conversion and exploits temporal separation scheme and novel neuron model for the relation to hold. We demonstrate almost lossless ANN-SNN conversion using SpikeConverter for VGG-16, ResNet-20/34, and MobileNet-v2 SNNs on challenging datasets including CIFAR-10, CIFAR-100, and ImageNet. Our results also show that SpikeConverter achieves the abovementioned accuracy across different network architectures and datasets using 32X - 512X fewer inference time-steps than state-of-the-art ANN-SNN conversion methods.}
}