{ "id": "2211.15880", "version": "v1", "published": "2022-11-29T02:33:56.000Z", "updated": "2022-11-29T02:33:56.000Z", "title": "Mirror descent of Hopfield model", "authors": [ "Hyungjoon Soh", "Dongyeob Kim", "Juno Hwang", "Junghyo Jo" ], "comment": "3 figures", "categories": [ "cs.LG", "math.OC" ], "abstract": "Mirror descent is a gradient descent method that uses a dual space of parametric models. The great idea has been developed in convex optimization, but not yet widely applied in machine learning. In this study, we provide a possible way that the mirror descent can help data-driven parameter initialization of neural networks. We adopt the Hopfield model as a prototype of neural networks, and we demonstrate that the mirror descent can train the model more effectively than the usual gradient descent with random parameter initialization.", "revisions": [ { "version": "v1", "updated": "2022-11-29T02:33:56.000Z" } ], "analyses": { "keywords": [ "mirror descent", "hopfield model", "neural networks", "help data-driven parameter initialization", "random parameter initialization" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }