{ "id": "2006.09637", "version": "v1", "published": "2020-06-17T03:45:25.000Z", "updated": "2020-06-17T03:45:25.000Z", "title": "FedCD: Improving Performance in non-IID Federated Learning", "authors": [ "Kavya Kopparapu", "Eric Lin", "Jessica Zhao" ], "categories": [ "cs.LG", "cs.DC", "stat.ML" ], "abstract": "Federated learning has been widely applied to enable decentralized devices, which each have their own local data, to learn a shared model. However, learning from real-world data can be challenging, as it is rarely identically and independently distributed (IID) across edge devices (a key assumption for current high-performing and low-bandwidth algorithms). We present a novel approach, FedCD, which clones and deletes models to dynamically group devices with similar data. Experiments on the CIFAR-10 dataset show that FedCD achieves higher accuracy and faster convergence compared to a FedAvg baseline on non-IID data while incurring minimal computation, communication, and storage overheads.", "revisions": [ { "version": "v1", "updated": "2020-06-17T03:45:25.000Z" } ], "analyses": { "keywords": [ "non-iid federated learning", "improving performance", "fedcd achieves higher accuracy", "edge devices", "low-bandwidth algorithms" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }