{ "id": "2110.12347", "version": "v2", "published": "2021-10-24T04:03:00.000Z", "updated": "2022-04-10T03:50:07.000Z", "title": "Acceleration in Distributed Optimization under Similarity", "authors": [ "Ye Tian", "Gesualdo Scutari", "Tianyu Cao", "Alexander Gasnikov" ], "categories": [ "math.OC", "cs.LG" ], "abstract": "We study distributed (strongly convex) optimization problems over a network of agents, with no centralized nodes. The loss functions of the agents are assumed to be \\textit{similar}, due to statistical data similarity or otherwise. In order to reduce the number of communications to reach a solution accuracy, we proposed a {\\it preconditioned, accelerated} distributed method. An $\\varepsilon$-solution is achieved in $\\tilde{\\mathcal{O}}\\big(\\sqrt{\\frac{\\beta/\\mu}{1-\\rho}}\\log1/\\varepsilon\\big)$ number of communications steps, where $\\beta/\\mu$ is the relative condition number between the global and local loss functions, and $\\rho$ characterizes the connectivity of the network. This rate matches (up to poly-log factors) lower complexity communication bounds of distributed gossip-algorithms applied to the class of problems of interest. Numerical results show significant communication savings with respect to existing accelerated distributed schemes, especially when solving ill-conditioned problems.", "revisions": [ { "version": "v2", "updated": "2022-04-10T03:50:07.000Z" } ], "analyses": { "keywords": [ "distributed optimization", "lower complexity communication bounds", "acceleration", "significant communication savings", "local loss functions" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }