{ "id": "2107.07343", "version": "v1", "published": "2021-07-04T15:15:36.000Z", "updated": "2021-07-04T15:15:36.000Z", "title": "Mutation is all you need", "authors": [ "Lennart Schneider", "Florian Pfisterer", "Martin Binder", "Bernd Bischl" ], "comment": "Accepted for the 8th ICML Workshop on Automated Machine Learning (2021). 10 pages, 1 table, 3 figures", "categories": [ "cs.LG", "cs.NE" ], "abstract": "Neural architecture search (NAS) promises to make deep learning accessible to non-experts by automating architecture engineering of deep neural networks. BANANAS is one state-of-the-art NAS method that is embedded within the Bayesian optimization framework. Recent experimental findings have demonstrated that the strong performance of BANANAS on the NAS-Bench-101 benchmark is determined by its path encoding and not its choice of surrogate model. We present experimental results suggesting that the performance of BANANAS on the NAS-Bench-301 benchmark is determined by its acquisition function optimizer, which minimally mutates the incumbent.", "revisions": [ { "version": "v1", "updated": "2021-07-04T15:15:36.000Z" } ], "analyses": { "keywords": [ "neural architecture search", "acquisition function optimizer", "deep neural networks", "state-of-the-art nas method", "bayesian optimization framework" ], "note": { "typesetting": "TeX", "pages": 10, "language": "en", "license": "arXiv", "status": "editable" } } }