{ "id": "1304.5245", "version": "v2", "published": "2013-04-18T20:25:15.000Z", "updated": "2015-12-24T22:09:57.000Z", "title": "Feature Elimination in Kernel Machines in moderately high dimensions", "authors": [ "Sayan Dasgupta", "Yair Goldberg", "Michael Kosorok" ], "comment": "50 pages, 5 figures, submitted to Annals of Statistics", "categories": [ "stat.ML" ], "abstract": "We develop an approach for feature elimination in statistical learning with kernel machines, based on recursive elimination of features. We present theoretical properties of this method and show that it is uniformly consistent in finding the correct feature space under certain generalized assumptions. We present four case studies to show that the assumptions are met in most practical situations and present simulation results to demonstrate performance of the proposed approach.", "revisions": [ { "version": "v1", "updated": "2013-04-18T20:25:15.000Z", "title": "Feature Elimination in empirical risk minimization and support vector machines", "abstract": "We develop an approach for feature elimination in empirical risk minimization and support vector machines, based on recursive elimination of features. We present theoretical properties of this method and show that this is uniformly consistent in finding the correct feature space under certain generalized assumptions. We present case studies to show that the assumptions are met in most practical situations and also present simulation studies to demonstrate performance of the proposed approach.", "comment": "43 pages, 3 figures, submitted to Annals of Statistics", "journal": null, "doi": null }, { "version": "v2", "updated": "2015-12-24T22:09:57.000Z" } ], "analyses": { "subjects": [ "68T05", "62G08" ], "keywords": [ "support vector machines", "empirical risk minimization", "feature elimination", "correct feature space", "assumptions" ], "note": { "typesetting": "TeX", "pages": 50, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2013arXiv1304.5245D" } } }