@inproceedings{161410582dd44519ba23175aac2a2ad8,
  title     = {The Impossibility of Parallelizing Boosting},
  abstract  = {The aim of boosting is to convert a sequence of weak learners into a strong learner. At their heart, these methods are fully sequential. In this paper, we investigate the possibility of parallelizing boosting. Our main contribution is a strong negative result, implying that significant parallelization of boosting requires an exponential blow-up in the total computing resources needed for training.},
  keywords  = {Boosting, Generalization, Parallelization, Weak-to-Strong Learning},
  author    = {Karbasi, Amin and Larsen, Kasper Green},
  note      = {Publisher Copyright: {\textcopyright} 2024 A. Karbasi \& K.G. Larsen.; 35th International Conference on Algorithmic Learning Theory, ALT 2024 ; Conference date: 25-02-2024 Through 28-02-2024},
  year      = {2024},
  month     = feb,
  language  = {English},
  volume    = {237},
  series    = {Proceedings of Machine Learning Research},
  publisher = {PMLR},
  pages     = {635--653},
  booktitle = {Proceedings of the 35th International Conference on Algorithmic Learning Theory},
}