{ "id": "2310.20630", "version": "v1", "published": "2023-10-31T16:59:07.000Z", "updated": "2023-10-31T16:59:07.000Z", "title": "Projecting basis functions with tensor networks for Gaussian process regression", "authors": [ "Clara Menzen", "Eva Memmel", "Kim Batselier", "Manon Kok" ], "categories": [ "stat.ML", "cs.LG" ], "abstract": "This paper presents a method for approximate Gaussian process (GP) regression with tensor networks (TNs). A parametric approximation of a GP uses a linear combination of basis functions, where the accuracy of the approximation depends on the total number of basis functions $M$. We develop an approach that allows us to use an exponential amount of basis functions without the corresponding exponential computational complexity. The key idea to enable this is using low-rank TNs. We first find a suitable low-dimensional subspace from the data, described by a low-rank TN. In this low-dimensional subspace, we then infer the weights of our model by solving a Bayesian inference problem. Finally, we project the resulting weights back to the original space to make GP predictions. The benefit of our approach comes from the projection to a smaller subspace: It modifies the shape of the basis functions in a way that it sees fit based on the given data, and it allows for efficient computations in the smaller subspace. In an experiment with an 18-dimensional benchmark data set, we show the applicability of our method to an inverse dynamics problem.", "revisions": [ { "version": "v1", "updated": "2023-10-31T16:59:07.000Z" } ], "analyses": { "keywords": [ "gaussian process regression", "projecting basis functions", "tensor networks", "low-rank tn", "smaller subspace" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }