% Malinovsky et al., "From Local SGD to Local Fixed-Point Methods for Federated
% Learning", ICML 2020. Values brace-delimited (nesting-safe), names in the
% unambiguous "Last, First" form, title in Title Case with {SGD} protected so
% sentence-casing styles keep the acronym intact.
@inproceedings{33937831b8164d449fa335ac88f15017,
  title     = {From Local {SGD} to Local Fixed-Point Methods for Federated Learning},
  abstract  = {Most algorithms for solving optimization problems or finding saddle points of convex-concave functions are fixed-point algorithms. In this work we consider the generic problem of finding a fixed point of an average of operators, or an approximation thereof, in a distributed setting. Our work is motivated by the needs of federated learning. In this context, each local operator models the computations done locally on a mobile device. We investigate two strategies to achieve such a consensus: one based on a fixed number of local steps, and the other based on randomized computations. In both cases, the goal is to limit communication of the locally-computed variables, which is often the bottleneck in distributed frameworks. We perform convergence analysis of both methods and conduct a number of experiments highlighting the benefits of our approach.},
  author    = {Malinovsky, Grigory and Kovalev, Dmitry and Gasanov, Elnur and Condat, Laurent and Richt{\'a}rik, Peter},
  editor    = {Daum{\'e} III, Hal and Singh, Aarti},
  booktitle = {37th International Conference on Machine Learning, ICML 2020},
  series    = {37th International Conference on Machine Learning, ICML 2020},
  publisher = {International Machine Learning Society (IMLS)},
  pages     = {6648--6657},
  year      = {2020},
  note      = {Publisher Copyright: {\textcopyright} 2020 37th International Conference on Machine Learning, ICML 2020. All rights reserved.; 37th International Conference on Machine Learning, ICML 2020 ; Conference date: 13-07-2020 Through 18-07-2020},
  language  = {English (US)},
}