@inproceedings{f7fe87c1170f4acea706a2ac53f88b26,
  title     = {Adaptive Stochastic Primal-Dual Coordinate Descent for Separable Saddle Point Problems},
  abstract  = {We consider a generic convex-concave saddle point problem with a separable structure, a form that covers a wide-ranged machine learning applications. Under this problem structure, we follow the framework of primal-dual updates for saddle point problems, and incorporate stochastic block coordinate descent with adaptive stepsizes into this framework. We theoretically show that our proposal of adaptive stepsizes potentially achieves a sharper linear convergence rate compared with the existing methods. Additionally, since we can select ``mini-batch'' of block coordinates to update, our method is also amenable to parallel processing for large-scale data. We apply the proposed method to regularized empirical risk minimization and show that it performs comparably or, more often, better than state-of-the-art methods on both synthetic and real-world data sets.},
  keywords  = {Large-scale optimization, Parallel optimization, Stochastic coordinate descent, Convex-concave saddle point problems},
  author    = {Zhu, Zhanxing and Storkey, Amos},
  year      = {2015},
  doi       = {10.1007/978-3-319-23528-8_40},
  language  = {English},
  isbn      = {978-3-319-23527-1},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer},
  pages     = {645--658},
  editor    = {Appice, Annalisa and Rodrigues, Pedro Pereira and {Santos Costa}, V{\'i}tor and Soares, Carlos and Gama, Jo{\~a}o and Jorge, Al{\'i}pio},
  booktitle = {Machine Learning and Knowledge Discovery in Databases},
}