{ "id": "1806.05823", "version": "v1", "published": "2018-06-15T06:34:34.000Z", "updated": "2018-06-15T06:34:34.000Z", "title": "Primal-dual residual networks", "authors": [ "Christoph Brauer", "Dirk Lorenz" ], "categories": [ "stat.ML", "cs.LG", "math.OC" ], "abstract": "In this work, we propose a deep neural network architecture motivated by primal-dual splitting methods from convex optimization. We show theoretically that there exists a close relation between the derived architecture and residual networks, and further investigate this connection in numerical experiments. Moreover, we demonstrate how our approach can be used to unroll optimization algorithms for certain problems with hard constraints. Using the example of speech dequantization, we show that our method can outperform classical splitting methods when both are applied to the same task.", "revisions": [ { "version": "v1", "updated": "2018-06-15T06:34:34.000Z" } ], "analyses": { "keywords": [ "primal-dual residual networks", "deep neural network architecture", "unroll optimization algorithms", "convex optimization", "close relation" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }