@article{Wang_Wang_Lin_Dong_Tao_Khan_2020,
  title    = {Few Sample Learning without Data Storage for Lifelong Stream Mining (Student Abstract)},
  author   = {Wang, Zhuoyi and Wang, Yigong and Lin, Yu and Dong, Bo and Tao, Hemeng and Khan, Latifur},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {34},
  number   = {10},
  pages    = {13961--13962},
  year     = {2020},
  month    = apr,
  doi      = {10.1609/aaai.v34i10.7253},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/7253},
  abstract = {Continuously mining complexity data stream has recently been attracting an increasing amount of attention, due to the rapid growth of real-world vision/signal applications such as self-driving cars and online social media messages. In this paper, we aim to address two significant problems in the lifelong/incremental stream mining scenario: first, how to make the learning algorithms generalize to the unseen classes only from a few labeled samples; second, is it possible to avoid storing instances from previously seen classes to solve the catastrophic forgetting problem? We introduce a novelty stream mining framework to classify the infinite stream of data with different categories that occurred during different times. We apply a few-sample learning strategy to make the model recognize the novel class with limited samples; at the same time, we implement an incremental generative model to maintain old knowledge when learning new coming categories, and also avoid the violation of data privacy and memory restrictions simultaneously. We evaluate our approach in the continual class-incremental setup on the classification tasks and ensure the sufficient model capacity to accommodate for learning the new incoming categories.},
}