@inproceedings{998,
  abstract  = {A major open problem on the road to artificial intelligence is the development of incrementally learning systems that learn about more and more concepts over time from a stream of data. In this work, we introduce a new training strategy, iCaRL, that allows learning in such a class-incremental way: only the training data for a small number of classes has to be present at the same time and new classes can be added progressively. iCaRL learns strong classifiers and a data representation simultaneously. This distinguishes it from earlier works that were fundamentally limited to fixed data representations and therefore incompatible with deep learning architectures. We show by experiments on CIFAR-100 and ImageNet ILSVRC 2012 data that iCaRL can learn many classes incrementally over a long period of time where other strategies quickly fail.},
  author    = {Rebuffi, Sylvestre-Alvise and Kolesnikov, Alexander and Sperl, Georg and Lampert, Christoph H.},
  booktitle = {Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
  isbn      = {978-1-5386-0457-1},
  location  = {Honolulu, HI, United States},
  pages     = {5533--5542},
  publisher = {IEEE},
  title     = {{iCaRL: Incremental Classifier and Representation Learning}},
  doi       = {10.1109/CVPR.2017.587},
  volume    = {2017},
  year      = {2017},
}
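
The abstract above describes the class-incremental setting: classes arrive in batches, only the training data of the current classes (plus a small exemplar memory) is available at any time, and the model must keep classifying all classes seen so far. The sketch below only illustrates that protocol under stated assumptions; it is not the authors' implementation. The learned deep representation and distillation loss are replaced by a fixed random feature map, the exemplar selection is a simplified stand-in for iCaRL's herding step, and every name in it (ExemplarMemory, nearest_mean_classify, ...) is hypothetical. The nearest-mean-of-exemplars classification rule, which iCaRL does use at test time, is kept.

# Minimal, illustrative sketch of the class-incremental protocol described in the
# abstract: classes arrive in batches, only the current batch of training data plus a
# small exemplar memory is available, and classification uses nearest-mean-of-exemplars.
# NOT the authors' implementation: no representation learning or distillation loss;
# the feature map is a fixed random projection, and all names here are hypothetical.
import numpy as np

rng = np.random.default_rng(0)
D_IN, D_FEAT, K_PER_CLASS = 32, 16, 20   # raw dim, feature dim, exemplars kept per class
W = rng.normal(size=(D_IN, D_FEAT))      # fixed random "representation" (stand-in for a CNN)

def features(x):
    """Map raw inputs to L2-normalised feature vectors."""
    f = np.tanh(x @ W)
    return f / np.linalg.norm(f, axis=1, keepdims=True)

class ExemplarMemory:
    """Stores a small number of exemplars per class (hypothetical helper)."""
    def __init__(self, k_per_class):
        self.k = k_per_class
        self.exemplars = {}              # class id -> array of raw inputs

    def add_class(self, label, x):
        # Keep the k samples closest to the class mean in feature space
        # (a simplified stand-in for iCaRL's herding-style selection).
        f = features(x)
        mu = f.mean(axis=0)
        order = np.argsort(np.linalg.norm(f - mu, axis=1))
        self.exemplars[label] = x[order[: self.k]]

    def class_means(self):
        return {c: features(x).mean(axis=0) for c, x in self.exemplars.items()}

def nearest_mean_classify(memory, x):
    """Assign each sample to the class whose exemplar mean is closest."""
    means = memory.class_means()
    labels = list(means)
    M = np.stack([means[c] for c in labels])            # (num_classes, D_FEAT)
    d = np.linalg.norm(features(x)[:, None, :] - M[None], axis=2)
    return np.array(labels)[d.argmin(axis=1)]

# Toy class-incremental run: two new classes per step, old training data never revisited.
memory = ExemplarMemory(K_PER_CLASS)
class_centres = {c: rng.normal(scale=2.0, size=D_IN) for c in range(6)}
for step, batch in enumerate([(0, 1), (2, 3), (4, 5)]):
    for c in batch:                                      # only the current classes are available
        memory.add_class(c, class_centres[c] + rng.normal(size=(100, D_IN)))
    test_x = np.concatenate([class_centres[c] + rng.normal(size=(20, D_IN))
                             for c in memory.exemplars])
    test_y = np.repeat(list(memory.exemplars), 20)
    acc = (nearest_mean_classify(memory, test_x) == test_y).mean()
    print(f"after step {step}: {len(memory.exemplars)} classes seen, accuracy {acc:.2f}")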