{ "id": "2107.08011", "version": "v1", "published": "2021-07-16T16:59:40.000Z", "updated": "2021-07-16T16:59:40.000Z", "title": "Adaptive first-order methods revisited: Convex optimization without Lipschitz requirements", "authors": [ "Kimon Antonakopoulos", "Panayotis Mertikopoulos" ], "comment": "34 pages, 4 figures", "categories": [ "math.OC", "cs.LG" ], "abstract": "We propose a new family of adaptive first-order methods for a class of convex minimization problems that may fail to be Lipschitz continuous or smooth in the standard sense. Specifically, motivated by a recent flurry of activity on non-Lipschitz (NoLips) optimization, we consider problems that are continuous or smooth relative to a reference Bregman function - as opposed to a global, ambient norm (Euclidean or otherwise). These conditions encompass a wide range of problems with singular objectives, such as Fisher markets, Poisson tomography, D-design, and the like. In this setting, the application of existing order-optimal adaptive methods - like UnixGrad or AcceleGrad - is not possible, especially in the presence of randomness and uncertainty. The proposed method - which we call adaptive mirror descent (AdaMir) - aims to close this gap by concurrently achieving min-max optimal rates in problems that are relatively continuous or smooth, including stochastic ones.", "revisions": [ { "version": "v1", "updated": "2021-07-16T16:59:40.000Z" } ], "analyses": { "subjects": [ "90C25", "90C15", "90C30", "68Q25", "90C60" ], "keywords": [ "adaptive first-order methods", "convex optimization", "lipschitz requirements", "concurrently achieving min-max optimal rates", "reference bregman function" ], "note": { "typesetting": "TeX", "pages": 34, "language": "en", "license": "arXiv", "status": "editable" } } }