{ "id": "cs/0701050", "version": "v2", "published": "2007-01-08T16:35:58.000Z", "updated": "2007-04-13T13:33:49.000Z", "title": "A Simple Proof of the Entropy-Power Inequality via Properties of Mutual Information", "authors": [ "Olivier Rioul" ], "comment": "5 pages, accepted for presentation at the IEEE International Symposium on Information Theory 2007", "categories": [ "cs.IT", "math.IT" ], "abstract": "While most useful information theoretic inequalities can be deduced from the basic properties of entropy or mutual information, Shannon's entropy power inequality (EPI) seems to be an exception: available information theoretic proofs of the EPI hinge on integral representations of differential entropy using either Fisher's information (FI) or minimum mean-square error (MMSE). In this paper, we first present a unified view of proofs via FI and MMSE, showing that they are essentially dual versions of the same proof, and then fill the gap by providing a new, simple proof of the EPI, which is solely based on the properties of mutual information and sidesteps both FI and MMSE representations.", "revisions": [ { "version": "v2", "updated": "2007-04-13T13:33:49.000Z" } ], "analyses": { "keywords": [ "mutual information", "simple proof", "entropy-power inequality", "properties", "shannons entropy power inequality" ], "note": { "typesetting": "TeX", "pages": 5, "language": "en", "license": "arXiv", "status": "editable", "adsabs": "2007cs........1050R" } } }