{ "id": "1906.07355", "version": "v1", "published": "2019-06-18T03:02:56.000Z", "updated": "2019-06-18T03:02:56.000Z", "title": "Escaping from saddle points on Riemannian manifolds", "authors": [ "Yue Sun", "Nicolas Flammarion", "Maryam Fazel" ], "comment": "submitted to NeurIPS 2019", "categories": [ "math.OC", "cs.LG", "stat.ML" ], "abstract": "We consider minimizing a nonconvex, smooth function $f$ on a Riemannian manifold $\\mathcal{M}$. We show that a perturbed version of Riemannian gradient descent algorithm converges to a second-order stationary point (and hence is able to escape saddle points on the manifold). The rate of convergence depends as $1/\\epsilon^2$ on the accuracy $\\epsilon$, which matches a rate known only for unconstrained smooth minimization. The convergence rate depends polylogarithmically on the manifold dimension $d$, hence is almost dimension-free. The rate also has a polynomial dependence on the parameters describing the curvature of the manifold and the smoothness of the function. While the unconstrained problem (Euclidean setting) is well-studied, our result is the first to prove such a rate for nonconvex, manifold-constrained problems.", "revisions": [ { "version": "v1", "updated": "2019-06-18T03:02:56.000Z" } ], "analyses": { "keywords": [ "riemannian manifold", "riemannian gradient descent algorithm converges", "second-order stationary point", "escape saddle points", "smooth function" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }