{ "id": "1902.02603", "version": "v1", "published": "2019-02-07T13:06:43.000Z", "updated": "2019-02-07T13:06:43.000Z", "title": "Radial and Directional Posteriors for Bayesian Neural Networks", "authors": [ "Changyong Oh", "Kamil Adamczewski", "Mijung Park" ], "comment": "17 pages, 9 figures", "categories": [ "stat.ML", "cs.LG" ], "abstract": "We propose a new variational family for Bayesian neural networks. We decompose the variational posterior into two components, where the radial component captures the strength of each neuron in terms of its magnitude, while the directional component captures the statistical dependencies among the weight parameters. The dependencies learned via the directional density provide better modeling performance compared to the widely-used Gaussian mean-field-type variational family. In addition, the strength of input and output neurons learned via the radial density provides a structured way to compress neural networks. Indeed, experiments show that our variational family improves predictive performance and yields compressed networks simultaneously.", "revisions": [ { "version": "v1", "updated": "2019-02-07T13:06:43.000Z" } ], "analyses": { "keywords": [ "bayesian neural networks", "directional posteriors", "gaussian mean-field-type variational family", "compress neural networks", "directional component captures" ], "note": { "typesetting": "TeX", "pages": 17, "language": "en", "license": "arXiv", "status": "editable" } } }