{ "id": "1411.1134", "version": "v1", "published": "2014-11-05T03:05:43.000Z", "updated": "2014-11-05T03:05:43.000Z", "title": "Global Convergence of Stochastic Gradient Descent for Some Nonconvex Matrix Problems", "authors": [ "Christopher De Sa", "Kunle Olukotun", "Christopher RĂ©" ], "categories": [ "cs.LG", "math.OC", "stat.ML" ], "abstract": "The Burer-Monteiro decomposition ($X = Y Y^T$) with stochastic gradient descent is commonly employed to speed up and scale up matrix problems including matrix completion, subspace tracking, and SDP relaxation. Although it is widely used in practice, there exist no known global convergence results for this method. In this paper, we prove that, under broad sampling conditions, a first-order rank-1 stochastic gradient descent (SGD) matrix recovery scheme converges globally from a random starting point at a $O(\\epsilon^{-1} n \\log n)$ rate with constant probability. We demonstrate our method experimentally.", "revisions": [ { "version": "v1", "updated": "2014-11-05T03:05:43.000Z" } ], "analyses": { "keywords": [ "stochastic gradient descent", "nonconvex matrix problems", "global convergence results", "matrix recovery scheme converges" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2014arXiv1411.1134D" } } }