{ "id": "1802.04350", "version": "v1", "published": "2018-02-12T20:31:58.000Z", "updated": "2018-02-12T20:31:58.000Z", "title": "On the Sample Complexity of Learning from a Sequence of Experiments", "authors": [ "Longyun Guo", "Jean Honorio", "John Morgan" ], "comment": "9 pages, 2 figures", "categories": [ "cs.LG", "stat.ML" ], "abstract": "We analyze the sample complexity of a new problem: learning from a sequence of experiments. In this problem, the learner should choose a hypothesis that performs well with respect to an infinite sequence of experiments, and their related data distributions. In practice, the learner can only perform m experiments with a total of N samples drawn from those data distributions. By using a Rademacher complexity approach, we show that the gap between the training and generation error is O((m/N)^0.5). We also provide some examples for linear prediction, two-layer neural networks and kernel methods.", "revisions": [ { "version": "v1", "updated": "2018-02-12T20:31:58.000Z" } ], "analyses": { "keywords": [ "sample complexity", "experiments", "rademacher complexity approach", "two-layer neural networks", "kernel methods" ], "note": { "typesetting": "TeX", "pages": 9, "language": "en", "license": "arXiv", "status": "editable" } } }