{ "id": "2112.08654", "version": "v2", "published": "2021-12-16T06:17:07.000Z", "updated": "2022-03-21T19:26:32.000Z", "title": "Learning to Prompt for Continual Learning", "authors": [ "Zifeng Wang", "Zizhao Zhang", "Chen-Yu Lee", "Han Zhang", "Ruoxi Sun", "Xiaoqi Ren", "Guolong Su", "Vincent Perot", "Jennifer Dy", "Tomas Pfister" ], "comment": "Published at CVPR 2022 as a conference paper", "categories": [ "cs.LG", "cs.CV" ], "abstract": "The mainstream paradigm behind continual learning has been to adapt the model parameters to non-stationary data distributions, where catastrophic forgetting is the central challenge. Typical methods rely on a rehearsal buffer or known task identity at test time to retrieve learned knowledge and address forgetting, while this work presents a new paradigm for continual learning that aims to train a more succinct memory system without accessing task identity at test time. Our method learns to dynamically prompt (L2P) a pre-trained model to learn tasks sequentially under different task transitions. In our proposed framework, prompts are small learnable parameters, which are maintained in a memory space. The objective is to optimize prompts to instruct the model prediction and explicitly manage task-invariant and task-specific knowledge while maintaining model plasticity. We conduct comprehensive experiments under popular image classification benchmarks with different challenging continual learning settings, where L2P consistently outperforms prior state-of-the-art methods. Surprisingly, L2P achieves competitive results against rehearsal-based methods even without a rehearsal buffer and is directly applicable to challenging task-agnostic continual learning.\nSource code is available at https://github.com/google-research/l2p.", "revisions": [ { "version": "v2", "updated": "2022-03-21T19:26:32.000Z" } ], "analyses": { "keywords": [ "continual learning", "popular image classification benchmarks", "rehearsal buffer", "task identity", "test time" ], "tags": [ "conference paper" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }