{ "id": "2107.07115", "version": "v1", "published": "2021-07-15T04:40:02.000Z", "updated": "2021-07-15T04:40:02.000Z", "title": "Principal component analysis for Gaussian process posteriors", "authors": [ "Hideaki Ishibashi", "Shotaro Akaho" ], "categories": [ "stat.ML", "cs.LG" ], "abstract": "This paper proposes an extension of principal component analysis for Gaussian process posteriors denoted by GP-PCA. Since GP-PCA estimates a low-dimensional space of GP posteriors, it can be used for meta-learning, which is a framework for improving the precision of a new task by estimating a structure of a set of tasks. The issue is how to define a structure of a set of GPs with an infinite-dimensional parameter, such as a coordinate system and a divergence. In this study, we reduce the infiniteness of GP to the finite-dimensional case under the information geometrical framework by considering a space of GP posteriors that has the same prior. In addition, we propose an approximation method of GP-PCA based on variational inference and demonstrate the effectiveness of GP-PCA as meta-learning through experiments.", "revisions": [ { "version": "v1", "updated": "2021-07-15T04:40:02.000Z" } ], "analyses": { "keywords": [ "principal component analysis", "gaussian process posteriors", "gp posteriors", "gp-pca estimates", "low-dimensional space" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }