{ "id": "2101.11201", "version": "v1", "published": "2021-01-27T04:37:34.000Z", "updated": "2021-01-27T04:37:34.000Z", "title": "Similarity of Classification Tasks", "authors": [ "Cuong Nguyen", "Thanh-Toan Do", "Gustavo Carneiro" ], "comment": "Accepted at Neurips Meta-learning Workshop 2020", "categories": [ "cs.LG", "stat.ML" ], "abstract": "Recent advances in meta-learning have led to remarkable performances on several few-shot learning benchmarks. However, such success often ignores the similarity between training and testing tasks, resulting in a potentially biased evaluation. We, therefore, propose a generative approach based on a variant of Latent Dirichlet Allocation to analyse task similarity to optimise and better understand the performance of meta-learning. We demonstrate that the proposed method can provide an insightful evaluation for meta-learning algorithms on two few-shot classification benchmarks that matches common intuition: the more similar the tasks, the higher the performance. Based on this similarity measure, we propose a task-selection strategy for meta-learning and show that it can produce more accurate classification results than methods that randomly select training tasks.", "revisions": [ { "version": "v1", "updated": "2021-01-27T04:37:34.000Z" } ], "analyses": { "keywords": [ "classification tasks", "analyse task similarity", "latent dirichlet allocation", "potential bias evaluation", "performance" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }