{ "id": "1111.5280", "version": "v4", "published": "2011-11-22T18:41:12.000Z", "updated": "2013-11-19T11:56:10.000Z", "title": "Stochastic gradient descent on Riemannian manifolds", "authors": [ "Silvere Bonnabel" ], "comment": "A slightly shorter version has been published in IEEE Transactions on Automatic Control", "journal": "IEEE Transactions on Automatic Control, Vol 58 (9), pages 2217 - 2229, Sept 2013", "categories": [ "math.OC", "cs.LG", "stat.ML" ], "abstract": "Stochastic gradient descent is a simple approach to find the local minima of a cost function whose evaluations are corrupted by noise. In this paper, we develop a procedure extending stochastic gradient descent algorithms to the case where the function is defined on a Riemannian manifold. We prove that, as in the Euclidean case, the gradient descent algorithm converges to a critical point of the cost function. The algorithm has numerous potential applications, and is illustrated here by four examples. In particular a novel gossip algorithm on the set of covariance matrices is derived and tested numerically.", "revisions": [ { "version": "v4", "updated": "2013-11-19T11:56:10.000Z" } ], "analyses": { "keywords": [ "riemannian manifold", "cost function", "procedure extending stochastic gradient descent", "gradient descent algorithm converges", "extending stochastic gradient descent algorithms" ], "tags": [ "journal article" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2011arXiv1111.5280B" } } }