{ "id": "1908.03006", "version": "v1", "published": "2019-08-08T10:38:01.000Z", "updated": "2019-08-08T10:38:01.000Z", "title": "Sparse $\\ell^q$-regularization of inverse problems with deep learning", "authors": [ "Markus Haltmeier", "Linh Nguyen", "Daniel Obmann", "Johannes Schwab" ], "categories": [ "math.NA", "cs.LG", "cs.NA", "math.OC" ], "abstract": "We propose a sparse reconstruction framework for solving inverse problems. Opposed to existing sparse reconstruction techniques that are based on linear sparsifying transforms, we train an encoder-decoder network $D \\circ E$ with $E$ acting as a nonlinear sparsifying transform. We minimize a Tikhonov functional which used a learned regularization term formed by the $\\ell^q$-norm of the encoder coefficients and a penalty for the distance to the data manifold. For this augmented sparse $\\ell^q$-approach, we present a full convergence analysis, derive convergence rates and describe a training strategy. As a main ingredient for the analysis we establish the coercivity of the augmented regularization term.", "revisions": [ { "version": "v1", "updated": "2019-08-08T10:38:01.000Z" } ], "analyses": { "keywords": [ "inverse problems", "deep learning", "existing sparse reconstruction techniques", "full convergence analysis", "sparse reconstruction framework" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }