@inproceedings{10.1145/3428662.3428792,
  author    = {Danner, G\'{a}bor and Heged\H{u}s, Istv\'{a}n and Jelasity, M\'{a}rk},
  title     = {Decentralized Machine Learning Using Compressed Push-Pull Averaging},
  booktitle = {Proceedings of the 1st International Workshop on Distributed Infrastructure for Common Good},
  series    = {DICG '20},
  year      = {2020},
  isbn      = {9781450381970},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  location  = {Delft, Netherlands},
  pages     = {31--36},
  numpages  = {6},
  url       = {https://doi.org/10.1145/3428662.3428792},
  doi       = {10.1145/3428662.3428792},
  keywords  = {decentralized averaging, compressed communication, machine learning},
  abstract  = {For decentralized learning algorithms, communication efficiency is a central issue. On the one hand, good machine learning models require more and more parameters. On the other hand, transferring data over P2P channels is relatively costly due to bandwidth and unreliability issues. Here, we propose a novel compression mechanism for P2P machine learning that is based on the application of stateful codecs over P2P links. In addition, we also rely on transfer learning for extra compression: we train a relatively small model on top of a fixed, high-quality pre-trained feature set. We demonstrate these contributions through an experimental analysis over a real smartphone trace.}
}