{ "id": "1411.1810", "version": "v1", "published": "2014-11-07T01:28:41.000Z", "updated": "2014-11-07T01:28:41.000Z", "title": "Deterministic Annealing for Stochastic Variational Inference", "authors": [ "Farhan Abrol", "Stephan Mandt", "Rajesh Ranganath", "David Blei" ], "comment": "8 pages, 5 figures", "categories": [ "stat.ML", "cs.LG" ], "abstract": "Stochastic variational inference (SVI) maps posterior inference in latent variable models to non-convex stochastic optimization. While they enable approximate posterior inference for many otherwise intractable models, variational inference methods suffer from local optima. We introduce deterministic annealing for SVI to overcome this issue. We introduce a temperature parameter that deterministically deforms the objective, and then reduce this parameter over the course of the optimization. Initially it encourages high entropy variational distributions, which we find eases convergence to better optima. We test our method with Latent Dirichlet Allocation on three large corpora. Compared to SVI, we show improved predictive likelihoods on held-out data.", "revisions": [ { "version": "v1", "updated": "2014-11-07T01:28:41.000Z" } ], "analyses": { "keywords": [ "stochastic variational inference", "deterministic annealing", "encourages high entropy variational distributions", "variational inference methods suffer", "latent dirichlet allocation" ], "note": { "typesetting": "TeX", "pages": 8, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2014arXiv1411.1810M" } } }