{ "id": "1802.09188", "version": "v1", "published": "2018-02-26T07:50:57.000Z", "updated": "2018-02-26T07:50:57.000Z", "title": "Analysis of Langevin Monte Carlo via convex optimization", "authors": [ "Alain Durmus", "Szymon Majewski", "Błażej Miasojedow" ], "categories": [ "stat.CO", "stat.ML" ], "abstract": "In this paper, we provide new insights on the Unadjusted Langevin Algorithm. We show that this method can be formulated as a first order optimization algorithm of an objective functional defined on the Wasserstein space of order $2$. Using this interpretation and techniques borrowed from convex optimization, we give a non-asymptotic analysis of this method to sample from logconcave smooth target distribution on $\\mathbb{R}^d$. Our proofs are then easily extended to the Stochastic Gradient Langevin Dynamics, which is a popular extension of the Unadjusted Langevin Algorithm. Finally, this interpretation leads to a new methodology to sample from a non-smooth target distribution, for which a similar study is done.", "revisions": [ { "version": "v1", "updated": "2018-02-26T07:50:57.000Z" } ], "analyses": { "keywords": [ "langevin monte carlo", "convex optimization", "unadjusted langevin algorithm", "stochastic gradient langevin dynamics", "first order optimization algorithm" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }