{ "id": "2007.06168", "version": "v1", "published": "2020-07-13T03:27:45.000Z", "updated": "2020-07-13T03:27:45.000Z", "title": "Model Fusion with Kullback--Leibler Divergence", "authors": [ "Sebastian Claici", "Mikhail Yurochkin", "Soumya Ghosh", "Justin Solomon" ], "comment": "ICML 2020", "categories": [ "cs.LG", "stat.ML" ], "abstract": "We propose a method to fuse posterior distributions learned from heterogeneous datasets. Our algorithm relies on a mean field assumption for both the fused model and the individual dataset posteriors and proceeds using a simple assign-and-average approach. The components of the dataset posteriors are assigned to the proposed global model components by solving a regularized variant of the assignment problem. The global components are then updated based on these assignments by their mean under a KL divergence. For exponential family variational distributions, our formulation leads to an efficient non-parametric algorithm for computing the fused model. Our algorithm is easy to describe and implement, efficient, and competitive with state-of-the-art on motion capture analysis, topic modeling, and federated learning of Bayesian neural networks.", "revisions": [ { "version": "v1", "updated": "2020-07-13T03:27:45.000Z" } ], "analyses": { "keywords": [ "kullback-leibler divergence", "model fusion", "mean field assumption", "exponential family variational distributions", "fused model" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }