{ "id": "1612.01474", "version": "v1", "published": "2016-12-05T18:54:43.000Z", "updated": "2016-12-05T18:54:43.000Z", "title": "Simple and Scalable Predictive Uncertainty Estimation using Deep Ensembles", "authors": [ "Balaji Lakshminarayanan", "Alexander Pritzel", "Charles Blundell" ], "categories": [ "stat.ML", "cs.LG" ], "abstract": "Deep neural networks are powerful black box predictors that have recently achieved impressive performance on a wide spectrum of tasks. Quantifying predictive uncertainty in neural networks is a challenging and yet unsolved problem. Bayesian neural networks, which learn a distribution over weights, are currently the state-of-the-art for estimating predictive uncertainty; however these require significant modifications to the training procedure and are computationally expensive compared to standard (non-Bayesian) neural networks. We propose an alternative to Bayesian neural networks, that is simple to implement, readily parallelisable and yields high quality predictive uncertainty estimates. Through a series of experiments on classification and regression benchmarks, we demonstrate that our method produces well-calibrated uncertainty estimates which are as good or better than approximate Bayesian neural networks. Finally, we evaluate the predictive uncertainty on test examples from known and unknown classes, and show that our method is able to express higher degree of uncertainty on unknown classes, unlike existing methods which make overconfident predictions even on unknown classes.", "revisions": [ { "version": "v1", "updated": "2016-12-05T18:54:43.000Z" } ], "analyses": { "keywords": [ "scalable predictive uncertainty estimation", "bayesian neural networks", "deep ensembles", "produces well-calibrated uncertainty estimates", "high quality predictive uncertainty" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }