{ "id": "1710.02950", "version": "v1", "published": "2017-10-09T06:16:50.000Z", "updated": "2017-10-09T06:16:50.000Z", "title": "Maximum Regularized Likelihood Estimators: A General Prediction Theory and Applications", "authors": [ "Rui Zhuang", "Johannes Lederer" ], "categories": [ "stat.ML", "math.ST", "stat.TH" ], "abstract": "Maximum regularized likelihood estimators (MRLEs) are arguably the most established class of estimators in high-dimensional statistics. In this paper, we derive guarantees for MRLEs in Kullback-Leibler divergence, a general measure of prediction accuracy. We assume only that the densities have a convex parametrization and that the regularization is definite and positive homogeneous. The results thus apply to a very large variety of models and estimators, such as tensor regression and graphical models with convex and non-convex regularized methods. A main conclusion is that MRLEs are broadly consistent in prediction - regardless of whether restricted eigenvalues or similar conditions hold.", "revisions": [ { "version": "v1", "updated": "2017-10-09T06:16:50.000Z" } ], "analyses": { "keywords": [ "maximum regularized likelihood estimators", "general prediction theory", "applications", "similar conditions hold", "general measure" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }