{ "id": "2103.00815", "version": "v1", "published": "2021-03-01T07:30:31.000Z", "updated": "2021-03-01T07:30:31.000Z", "title": "Computation complexity of deep ReLU neural networks in high-dimensional approximation", "authors": [ "Dinh Dũng", "Van Kien Nguyen", "Mai Xuan Thao" ], "comment": "30 pages. arXiv admin note: text overlap with arXiv:2007.08729", "categories": [ "math.NA", "cs.NA" ], "abstract": "The purpose of the present paper is to study the computation complexity of deep ReLU neural networks to approximate functions in H\\\"older-Nikol'skii spaces of mixed smoothness $H_\\infty^\\alpha(\\mathbb{I}^d)$ on the unit cube $\\mathbb{I}^d:=[0,1]^d$. In this context, for any function $f\\in H_\\infty^\\alpha(\\mathbb{I}^d)$, we explicitly construct nonadaptive and adaptive deep ReLU neural networks having an output that approximates $f$ with a prescribed accuracy $\\varepsilon$, and prove dimension-dependent bounds for the computation complexity of this approximation, characterized by the size and the depth of this deep ReLU neural network, explicitly in $d$ and $\\varepsilon$. Our results show the advantage of the adaptive method of approximation by deep ReLU neural networks over nonadaptive one.", "revisions": [ { "version": "v1", "updated": "2021-03-01T07:30:31.000Z" } ], "analyses": { "keywords": [ "computation complexity", "high-dimensional approximation", "adaptive deep relu neural networks", "dimension-dependent bounds", "unit cube" ], "note": { "typesetting": "TeX", "pages": 30, "language": "en", "license": "arXiv", "status": "editable" } } }