{ "id": "2102.08668", "version": "v1", "published": "2021-02-17T10:19:26.000Z", "updated": "2021-02-17T10:19:26.000Z", "title": "Non-asymptotic approximations of neural networks by Gaussian processes", "authors": [ "Ronen Eldan", "Dan Mikulincer", "Tselil Schramm" ], "comment": "18 pages", "categories": [ "math.PR", "cs.LG", "stat.ML" ], "abstract": "We study the extent to which wide neural networks may be approximated by Gaussian processes when initialized with random weights. It is a well-established fact that as the width of a network goes to infinity, its law converges to that of a Gaussian process. We make this quantitative by establishing explicit convergence rates for the central limit theorem in an infinite-dimensional functional space, metrized with a natural transportation distance. We identify two regimes of interest; when the activation function is polynomial, its degree determines the rate of convergence, while for non-polynomial activations, the rate is governed by the smoothness of the function.", "revisions": [ { "version": "v1", "updated": "2021-02-17T10:19:26.000Z" } ], "analyses": { "keywords": [ "gaussian process", "non-asymptotic approximations", "infinite-dimensional functional space", "natural transportation distance", "wide neural networks" ], "note": { "typesetting": "TeX", "pages": 18, "language": "en", "license": "arXiv", "status": "editable" } } }