{ "id": "2304.06619", "version": "v1", "published": "2023-04-13T15:40:41.000Z", "updated": "2023-04-13T15:40:41.000Z", "title": "Class-Incremental Learning of Plant and Disease Detection: Growing Branches with Knowledge Distillation", "authors": [ "Mathieu Pagé Fortin" ], "categories": [ "cs.CV" ], "abstract": "This paper investigates the problem of class-incremental object detection for agricultural applications where a model needs to learn new plant species and diseases incrementally without forgetting the previously learned ones. We adapt two public datasets to include new categories over time, simulating a more realistic and dynamic scenario. We then compare three class-incremental learning methods that leverage different forms of knowledge distillation to mitigate catastrophic forgetting. Our experiments show that all three methods suffer from catastrophic forgetting, but the recent Dynamic Y-KD approach, which additionally uses a dynamic architecture that grows new branches to learn new tasks, outperforms ILOD and Faster-ILOD in most scenarios both on new and old classes. These results highlight the challenges and opportunities of continual object detection for agricultural applications. In particular, the large intra-class and small inter-class variability that is typical of plant images exacerbates the difficulty of learning new categories without interfering with previous knowledge. We publicly release our code to encourage future work.", "revisions": [ { "version": "v1", "updated": "2023-04-13T15:40:41.000Z" } ], "analyses": { "keywords": [ "knowledge distillation", "class-incremental learning", "disease detection", "growing branches", "agricultural applications" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }