@inproceedings{57b35fe24af340618ff16eed41f96be8,
  title     = {Fast Distributed Coordinate Descent for Non-Strongly Convex Losses},
  abstract  = {We propose an efficient distributed randomized coordinate descent method for minimizing regularized non-strongly convex loss functions. The method attains the optimal {$O(1/k^2)$} convergence rate, where k is the iteration counter. The core of the work is the theoretical study of stepsize parameters. We have implemented the method on Archer - the largest super-computer in the UK - and show that the method is capable of solving a (synthetic) LASSO optimization problem with 50 billion variables.},
  keywords  = {Coordinate descent, acceleration, distributed algorithms},
  author    = {Fercoq, Olivier and Qu, Zheng and Richt{\'a}rik, Peter and Tak{\'a}{\v c}, Martin},
  note      = {Publisher Copyright: {\textcopyright} 2014 IEEE.; 2014 24th IEEE International Workshop on Machine Learning for Signal Processing, MLSP 2014 ; Conference date: 21-09-2014 Through 24-09-2014},
  year      = {2014},
  month     = nov,
  day       = {14},
  doi       = {10.1109/MLSP.2014.6958862},
  language  = {English (US)},
  series    = {IEEE International Workshop on Machine Learning for Signal Processing, {MLSP}},
  publisher = {IEEE Computer Society},
  editor    = {Mboup, Mamadou and Adali, Tulay and Moreau, Eric and Larsen, Jan},
  booktitle = {IEEE International Workshop on Machine Learning for Signal Processing, {MLSP}},
  address   = {United States},
}