@inproceedings{54cb45be10384a1f8d540a73e696df00,
  author    = {Lang, Natalie and Shlezinger, Nir},
  title     = {Joint Privacy Enhancement and Quantization in Federated Learning},
  abstract  = {Federated learning (FL) is an emerging paradigm for training machine learning models using possibly private data available at edge devices. Among the key challenges associated with FL are first the need to preserve the privacy of the local data sets, and second the communication load due to the repeated exchange of updated models; both are often tackled individually with methods whose operation distorts the updated models, e.g., local differential privacy (LDP) mechanisms and lossy compression, respectively. In this work we propose a method for joint privacy enhancement and quantization (JoPEQ), unifying lossy compression and privacy enhancement for FL. JoPEQ utilizes universal vector quantization, where distortion is statistically equivalent to additive noise, and augments the compression distortion with dedicated privacy preserving noise to simultaneously achieve compression and a desired privacy level. We numerically demonstrate that JoPEQ reduces the overall distortion compared to individual LDP and compression, which is translated into improved trained models.},
  keywords  = {Training, Privacy, Vector quantization, Stochastic processes, Reliability theory, Distortion, Collaborative work},
  booktitle = {2022 {IEEE} International Symposium on Information Theory, {ISIT} 2022},
  series    = {{IEEE} International Symposium on Information Theory - Proceedings},
  pages     = {2040--2045},
  publisher = {Institute of Electrical and Electronics Engineers},
  address   = {United States},
  year      = {2022},
  month     = jul,
  doi       = {10.1109/ISIT50566.2022.9834551},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2022 IEEE.; 2022 IEEE International Symposium on Information Theory, ISIT 2022; Conference date: 26-06-2022 Through 01-07-2022},
}