{ "id": "2205.07764", "version": "v1", "published": "2022-05-16T15:42:25.000Z", "updated": "2022-05-16T15:42:25.000Z", "title": "On the inability of Gaussian process regression to optimally learn compositional functions", "authors": [ "Matteo Giordano", "Kolyan Ray", "Johannes Schmidt-Hieber" ], "comment": "24 pages", "categories": [ "stat.ML", "cs.LG", "math.ST", "stat.TH" ], "abstract": "We rigorously prove that deep Gaussian process priors can outperform Gaussian process priors if the target function has a compositional structure. To this end, we study information-theoretic lower bounds for posterior contraction rates for Gaussian process regression in a continuous regression model. We show that if the true function is a generalized additive function, then the posterior based on any mean-zero Gaussian process can only recover the truth at a rate that is strictly slower than the minimax rate by a factor that is polynomially suboptimal in the sample size $n$.", "revisions": [ { "version": "v1", "updated": "2022-05-16T15:42:25.000Z" } ], "analyses": { "keywords": [ "gaussian process regression", "optimally learn compositional functions", "study information-theoretic lower bounds", "outperform gaussian process priors", "deep gaussian process priors" ], "note": { "typesetting": "TeX", "pages": 24, "language": "en", "license": "arXiv", "status": "editable" } } }