{ "id": "1803.06765", "version": "v1", "published": "2018-03-19T00:17:42.000Z", "updated": "2018-03-19T00:17:42.000Z", "title": "Sparse Regularization via Convex Analysis", "authors": [ "Ivan Selesnick" ], "journal": "IEEE Transactions on Signal Processing, vol. 65, no. 17, pp. 4481-4494, 2017", "doi": "10.1109/TSP.2017.2711501", "categories": [ "math.OC" ], "abstract": "Sparse approximate solutions to linear equations are classically obtained via L1 norm regularized least squares, but this method often underestimates the true solution. As an alternative to the L1 norm, this paper proposes a class of non-convex penalty functions that maintains the convexity of the least squares cost function to be minimized, and avoids the systematic underestimation characteristic of L1 norm regularization. The proposed penalty function is a multivariate generalization of the minimax-concave (MC) penalty. It is defined in terms of a new multivariate generalization of the Huber function, which in turn is defined via infimal convolution. The proposed sparse-regularized least squares cost function can be minimized by proximal algorithms comprising simple computations.", "revisions": [ { "version": "v1", "updated": "2018-03-19T00:17:42.000Z" } ], "analyses": { "keywords": [ "sparse regularization", "convex analysis", "squares cost function", "l1 norm", "multivariate generalization" ], "tags": [ "journal article" ], "publication": { "publisher": "IEEE" }, "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }