{ "id": "2304.10552", "version": "v1", "published": "2023-04-20T08:45:16.000Z", "updated": "2023-04-20T08:45:16.000Z", "title": "Interpolation property of shallow neural networks", "authors": [ "Vlad-Raul Constantinescu", "Ionel Popescu" ], "categories": [ "cs.LG", "math.OC", "math.PR", "stat.ML" ], "abstract": "We study the geometry of global minima of the loss landscape of overparametrized neural networks. In most optimization problems, the loss function is convex, in which case we only have a single global minimum, or nonconvex, with a discrete number of global minima. In this paper, we prove that in the overparametrized regime, a shallow neural network can interpolate any data set, i.e. the loss function has a global minimum value equal to zero, as long as the activation function is not a polynomial of small degree. Additionally, if such a global minimum exists, then the locus of global minima has infinitely many points. Furthermore, we give a characterization of the Hessian of the loss function evaluated at the global minima, and in the last section, we provide a practical probabilistic method of finding the interpolation point.", "revisions": [ { "version": "v1", "updated": "2023-04-20T08:45:16.000Z" } ], "analyses": { "keywords": [ "shallow neural network", "interpolation property", "loss function", "global minimum value equal", "interpolation point" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }