{ "id": "1405.5139", "version": "v3", "published": "2014-05-20T16:10:19.000Z", "updated": "2017-04-12T13:19:25.000Z", "title": "Algorithmic identification of probabilities is hard", "authors": [ "Laurent Bienvenu", "Santiago Figueira", "Benoit Monin", "Alexander Shen" ], "categories": [ "math.LO" ], "abstract": "Suppose that we are given an infinite binary sequence which is random for a Bernoulli measure of parameter $p$. By the law of large numbers, the frequency of zeros in the sequence tends to $p$, and thus we can get better and better approximations of $p$ as we read the sequence. We study in this paper a similar question, but from the viewpoint of inductive inference. We suppose now that $p$ is a computable real, but one asks for more: as we are reading more and more bits of our random sequence, we have to eventually guess the exact parameter $p$ (in the form of a Turing code). Can one do such a thing uniformly on all sequences that are random for computable Bernoulli measures, or even on a 'large enough' fraction of them? In this paper, we give a negative answer to this question. In fact, we prove a very general negative result which extends far beyond the class of Bernoulli measures.", "revisions": [ { "version": "v2", "updated": "2014-05-24T12:18:25.000Z", "comment": null, "journal": null, "doi": null, "authors": [ "Laurent Bienvenu", "Benoit Monin", "Alexander Shen" ] }, { "version": "v3", "updated": "2017-04-12T13:19:25.000Z" } ], "analyses": { "subjects": [ "03D32", "68Q30", "68Q32" ], "keywords": [ "algorithmic identification", "bernoulli measure", "probabilities", "infinite binary sequence", "large numbers" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2014arXiv1405.5139B" } } }