[{"publisher":"IEEE","article_processing_charge":"No","doi":"10.1109/TIT.2025.3587340","oa":1,"department":[{"_id":"MaMo"}],"month":"07","article_type":"original","author":[{"first_name":"Amedeo Roberto","id":"9583e921-e1ad-11ec-9862-cef099626dc9","full_name":"Esposito, Amedeo Roberto","last_name":"Esposito"},{"first_name":"Michael","full_name":"Gastpar, Michael","last_name":"Gastpar"},{"first_name":"Ibrahim","last_name":"Issa","full_name":"Issa, Ibrahim"}],"publication_identifier":{"eissn":["1557-9654"],"issn":["0018-9448"]},"arxiv":1,"OA_type":"green","oa_version":"Preprint","date_created":"2025-07-27T22:01:26Z","title":"Sibson α-mutual information and its variational representations","publication_status":"epub_ahead","status":"public","scopus_import":"1","publication":"IEEE Transactions on Information Theory","date_updated":"2026-02-16T11:49:40Z","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","year":"2025","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2405.08352","open_access":"1"}],"OA_place":"repository","date_published":"2025-07-11T00:00:00Z","quality_controlled":"1","type":"journal_article","external_id":{"arxiv":["2405.08352"]},"abstract":[{"text":"Information measures can be constructed from Rényi divergences much like mutual information from Kullback-Leibler divergence. One such information measure is known as Sibson α-mutual information and has received renewed attention recently in several contexts: concentration of measure under dependence, statistical learning, hypothesis testing, and estimation theory. In this paper, we survey and extend the state of the art. In particular, we introduce variational representations for Sibson α-mutual information and employ them in each described context to derive novel results. Namely, we produce generalized Transportation-Cost inequalities and Fano-type inequalities. 
We also present an overview of known applications, spanning from learning theory and Bayesian risk to universal prediction.","lang":"eng"}],"_id":"20081","language":[{"iso":"eng"}],"day":"11","citation":{"ista":"Esposito AR, Gastpar M, Issa I. 2025. Sibson α-mutual information and its variational representations. IEEE Transactions on Information Theory.","ama":"Esposito AR, Gastpar M, Issa I. Sibson α-mutual information and its variational representations. <i>IEEE Transactions on Information Theory</i>. 2025. doi:<a href=\"https://doi.org/10.1109/TIT.2025.3587340\">10.1109/TIT.2025.3587340</a>","chicago":"Esposito, Amedeo Roberto, Michael Gastpar, and Ibrahim Issa. “Sibson α-Mutual Information and Its Variational Representations.” <i>IEEE Transactions on Information Theory</i>. IEEE, 2025. <a href=\"https://doi.org/10.1109/TIT.2025.3587340\">https://doi.org/10.1109/TIT.2025.3587340</a>.","ieee":"A. R. Esposito, M. Gastpar, and I. Issa, “Sibson α-mutual information and its variational representations,” <i>IEEE Transactions on Information Theory</i>. IEEE, 2025.","short":"A.R. Esposito, M. Gastpar, I. Issa, IEEE Transactions on Information Theory (2025).","mla":"Esposito, Amedeo Roberto, et al. “Sibson α-Mutual Information and Its Variational Representations.” <i>IEEE Transactions on Information Theory</i>, IEEE, 2025, doi:<a href=\"https://doi.org/10.1109/TIT.2025.3587340\">10.1109/TIT.2025.3587340</a>.","apa":"Esposito, A. R., Gastpar, M., &#38; Issa, I. (2025). Sibson α-mutual information and its variational representations. <i>IEEE Transactions on Information Theory</i>. IEEE. 
<a href=\"https://doi.org/10.1109/TIT.2025.3587340\">https://doi.org/10.1109/TIT.2025.3587340</a>"}},{"status":"public","page":"3178-3183","scopus_import":"1","publication":"Proceedings of the 2024 IEEE International Symposium on Information Theory","date_updated":"2025-09-08T09:18:00Z","isi":1,"year":"2024","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","date_published":"2024-08-19T00:00:00Z","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2403.10656","open_access":"1"}],"_id":"17893","language":[{"iso":"eng"}],"abstract":[{"text":"Strong data processing inequalities (SDPI) are an important object of study in Information Theory and have been well studied for f-divergences. Universal upper and lower bounds have been provided along with several applications, connecting them to impossibility (converse) results, concentration of measure, hypercontractivity, and so on. In this paper, we study Rényi divergence and the corresponding SDPI constant whose behavior seems to deviate from that of ordinary f-divergences. In particular, one can find examples showing that the universal upper bound relating its SDPI constant to the one of Total Variation does not hold in general. In this work, we prove, however, that the universal lower bound involving the SDPI constant of the Chi-square divergence does indeed hold. Furthermore, we also provide a characterization of the distribution that achieves the supremum when α is equal to 2 and consequently compute the SDPI constant for Rényi divergence of the general binary channel.","lang":"eng"}],"external_id":{"arxiv":["2403.10656"],"isi":["001304426903055"]},"type":"conference","quality_controlled":"1","citation":{"apa":"Jin, L., Esposito, A. R., &#38; Gastpar, M. (2024). Properties of the strong data processing constant for Rényi divergence. In <i>Proceedings of the 2024 IEEE International Symposium on Information Theory</i> (pp. 3178–3183). Athens, Greece: Institute of Electrical and Electronics Engineers. 
<a href=\"https://doi.org/10.1109/ISIT57864.2024.10619367\">https://doi.org/10.1109/ISIT57864.2024.10619367</a>","mla":"Jin, Lifu, et al. “Properties of the Strong Data Processing Constant for Rényi Divergence.” <i>Proceedings of the 2024 IEEE International Symposium on Information Theory</i>, Institute of Electrical and Electronics Engineers, 2024, pp. 3178–83, doi:<a href=\"https://doi.org/10.1109/ISIT57864.2024.10619367\">10.1109/ISIT57864.2024.10619367</a>.","short":"L. Jin, A.R. Esposito, M. Gastpar, in:, Proceedings of the 2024 IEEE International Symposium on Information Theory, Institute of Electrical and Electronics Engineers, 2024, pp. 3178–3183.","ieee":"L. Jin, A. R. Esposito, and M. Gastpar, “Properties of the strong data processing constant for Rényi divergence,” in <i>Proceedings of the 2024 IEEE International Symposium on Information Theory</i>, Athens, Greece, 2024, pp. 3178–3183.","chicago":"Jin, Lifu, Amedeo Roberto Esposito, and Michael Gastpar. “Properties of the Strong Data Processing Constant for Rényi Divergence.” In <i>Proceedings of the 2024 IEEE International Symposium on Information Theory</i>, 3178–83. Institute of Electrical and Electronics Engineers, 2024. <a href=\"https://doi.org/10.1109/ISIT57864.2024.10619367\">https://doi.org/10.1109/ISIT57864.2024.10619367</a>.","ama":"Jin L, Esposito AR, Gastpar M. Properties of the strong data processing constant for Rényi divergence. In: <i>Proceedings of the 2024 IEEE International Symposium on Information Theory</i>. Institute of Electrical and Electronics Engineers; 2024:3178-3183. doi:<a href=\"https://doi.org/10.1109/ISIT57864.2024.10619367\">10.1109/ISIT57864.2024.10619367</a>","ista":"Jin L, Esposito AR, Gastpar M. 2024. Properties of the strong data processing constant for Rényi divergence. Proceedings of the 2024 IEEE International Symposium on Information Theory. 
ISIT: International Symposium on Information Theory, 3178–3183."},"day":"19","corr_author":"1","article_processing_charge":"No","publisher":"Institute of Electrical and Electronics Engineers","acknowledgement":"The work in this paper was supported in part by the Swiss National Science Foundation under Grant 200364.\r\n","oa":1,"doi":"10.1109/ISIT57864.2024.10619367","author":[{"first_name":"Lifu","full_name":"Jin, Lifu","last_name":"Jin"},{"last_name":"Esposito","full_name":"Esposito, Amedeo Roberto","first_name":"Amedeo Roberto","id":"9583e921-e1ad-11ec-9862-cef099626dc9"},{"first_name":"Michael","last_name":"Gastpar","full_name":"Gastpar, Michael"}],"department":[{"_id":"MaMo"}],"month":"08","arxiv":1,"publication_identifier":{"isbn":["9798350382846"],"issn":["2157-8095"]},"title":"Properties of the strong data processing constant for Rényi divergence","date_created":"2024-09-08T22:01:12Z","oa_version":"Preprint","publication_status":"published","conference":{"start_date":"2024-07-07","name":"ISIT: International Symposium on Information Theory","location":"Athens, Greece","end_date":"2024-07-12"}},{"scopus_import":"1","page":"2110-2115","status":"public","publication":"Proceedings of the 2024 IEEE International Symposium on Information Theory ","date_updated":"2025-09-08T09:18:44Z","isi":1,"date_published":"2024-08-19T00:00:00Z","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","year":"2024","day":"19","citation":{"apa":"Esposito, A. R., Gastpar, M., &#38; Issa, I. (2024). Variational characterizations of Sibson’s α-mutual information. In <i>Proceedings of the 2024 IEEE International Symposium on Information Theory </i> (pp. 2110–2115). Athens, Greece: Institute of Electrical and Electronics Engineers. <a href=\"https://doi.org/10.1109/ISIT57864.2024.10619378\">https://doi.org/10.1109/ISIT57864.2024.10619378</a>","ieee":"A. R. Esposito, M. Gastpar, and I. 
Issa, “Variational characterizations of Sibson’s α-mutual information,” in <i>Proceedings of the 2024 IEEE International Symposium on Information Theory </i>, Athens, Greece, 2024, pp. 2110–2115.","short":"A.R. Esposito, M. Gastpar, I. Issa, in:, Proceedings of the 2024 IEEE International Symposium on Information Theory , Institute of Electrical and Electronics Engineers, 2024, pp. 2110–2115.","mla":"Esposito, Amedeo Roberto, et al. “Variational Characterizations of Sibson’s α-Mutual Information.” <i>Proceedings of the 2024 IEEE International Symposium on Information Theory </i>, Institute of Electrical and Electronics Engineers, 2024, pp. 2110–15, doi:<a href=\"https://doi.org/10.1109/ISIT57864.2024.10619378\">10.1109/ISIT57864.2024.10619378</a>.","chicago":"Esposito, Amedeo Roberto, Michael Gastpar, and Ibrahim Issa. “Variational Characterizations of Sibson’s α-Mutual Information.” In <i>Proceedings of the 2024 IEEE International Symposium on Information Theory </i>, 2110–15. Institute of Electrical and Electronics Engineers, 2024. <a href=\"https://doi.org/10.1109/ISIT57864.2024.10619378\">https://doi.org/10.1109/ISIT57864.2024.10619378</a>.","ista":"Esposito AR, Gastpar M, Issa I. 2024. Variational characterizations of Sibson’s α-mutual information. Proceedings of the 2024 IEEE International Symposium on Information Theory . ISIT: International Symposium on Information Theory, 2110–2115.","ama":"Esposito AR, Gastpar M, Issa I. Variational characterizations of Sibson’s α-mutual information. In: <i>Proceedings of the 2024 IEEE International Symposium on Information Theory </i>. Institute of Electrical and Electronics Engineers; 2024:2110-2115. 
doi:<a href=\"https://doi.org/10.1109/ISIT57864.2024.10619378\">10.1109/ISIT57864.2024.10619378</a>"},"external_id":{"isi":["001304426902023"]},"language":[{"iso":"eng"}],"_id":"17894","abstract":[{"lang":"eng","text":"Sibson's α -mutual information has received renewed attention recently in several contexts: concentration of measure under dependence, statistical learning, hypothesis testing, and estimation theory. In this work, we introduce several variational representations of Sibson's α -mutual information: 1) as a supremum over joint distributions of (a combination of) KL divergences; and 2) as a supremum over functions of opportune expected values. Leveraging them, we produce a variety of novel and known results, including a generalization of transportation-cost inequalities and Fano's inequality."}],"quality_controlled":"1","type":"conference","article_processing_charge":"No","corr_author":"1","publisher":"Institute of Electrical and Electronics Engineers","author":[{"last_name":"Esposito","full_name":"Esposito, Amedeo Roberto","first_name":"Amedeo Roberto","id":"9583e921-e1ad-11ec-9862-cef099626dc9"},{"last_name":"Gastpar","full_name":"Gastpar, Michael","first_name":"Michael"},{"first_name":"Ibrahim","full_name":"Issa, Ibrahim","last_name":"Issa"}],"month":"08","department":[{"_id":"MaMo"}],"doi":"10.1109/ISIT57864.2024.10619378","acknowledgement":"The work in this paper was supported in part by the Swiss National Science Foundation under Grant 200364.","date_created":"2024-09-08T22:01:12Z","title":"Variational characterizations of Sibson's α-mutual information","oa_version":"None","publication_identifier":{"isbn":["9798350382846"],"issn":["2157-8095"]},"publication_status":"published","conference":{"end_date":"2024-07-12","location":"Athens, Greece","name":"ISIT: International Symposium on Information Theory","start_date":"2024-07-07"}},{"publication_status":"published","oa_version":"Preprint","title":"Concentration without independence via information 
measures","date_created":"2024-03-24T23:01:00Z","publication_identifier":{"eissn":["1557-9654"],"issn":["0018-9448"]},"arxiv":1,"department":[{"_id":"MaMo"}],"month":"06","article_type":"original","project":[{"name":"Prix Lopez-Loretta 2019 - Marco Mondelli","_id":"059876FA-7A3F-11EA-A408-12923DDC885E"}],"volume":70,"author":[{"id":"9583e921-e1ad-11ec-9862-cef099626dc9","first_name":"Amedeo Roberto","full_name":"Esposito, Amedeo Roberto","last_name":"Esposito"},{"last_name":"Mondelli","full_name":"Mondelli, Marco","orcid":"0000-0002-3242-7020","id":"27EB676C-8706-11E9-9510-7717E6697425","first_name":"Marco"}],"doi":"10.1109/TIT.2024.3367767","oa":1,"related_material":{"record":[{"status":"public","id":"14922","relation":"earlier_version"}]},"publisher":"IEEE","article_processing_charge":"No","corr_author":"1","day":"01","citation":{"ista":"Esposito AR, Mondelli M. 2024. Concentration without independence via information measures. IEEE Transactions on Information Theory. 70(6), 3823–3839.","ama":"Esposito AR, Mondelli M. Concentration without independence via information measures. <i>IEEE Transactions on Information Theory</i>. 2024;70(6):3823-3839. doi:<a href=\"https://doi.org/10.1109/TIT.2024.3367767\">10.1109/TIT.2024.3367767</a>","chicago":"Esposito, Amedeo Roberto, and Marco Mondelli. “Concentration without Independence via Information Measures.” <i>IEEE Transactions on Information Theory</i>. IEEE, 2024. <a href=\"https://doi.org/10.1109/TIT.2024.3367767\">https://doi.org/10.1109/TIT.2024.3367767</a>.","ieee":"A. R. Esposito and M. Mondelli, “Concentration without independence via information measures,” <i>IEEE Transactions on Information Theory</i>, vol. 70, no. 6. IEEE, pp. 3823–3839, 2024.","short":"A.R. Esposito, M. Mondelli, IEEE Transactions on Information Theory 70 (2024) 3823–3839.","mla":"Esposito, Amedeo Roberto, and Marco Mondelli. “Concentration without Independence via Information Measures.” <i>IEEE Transactions on Information Theory</i>, vol. 
70, no. 6, IEEE, 2024, pp. 3823–39, doi:<a href=\"https://doi.org/10.1109/TIT.2024.3367767\">10.1109/TIT.2024.3367767</a>.","apa":"Esposito, A. R., &#38; Mondelli, M. (2024). Concentration without independence via information measures. <i>IEEE Transactions on Information Theory</i>. IEEE. <a href=\"https://doi.org/10.1109/TIT.2024.3367767\">https://doi.org/10.1109/TIT.2024.3367767</a>"},"quality_controlled":"1","type":"journal_article","external_id":{"isi":["001230181100001"],"arxiv":["2303.07245"]},"language":[{"iso":"eng"}],"_id":"15172","abstract":[{"text":"We propose a novel approach to concentration for non-independent random variables. The main idea is to “pretend” that the random variables are independent and pay a multiplicative price measuring how far they are from actually being independent. This price is encapsulated in the Hellinger integral between the joint and the product of the marginals, which is then upper bounded leveraging tensorisation properties. Our bounds represent a natural generalisation of concentration inequalities in the presence of dependence: we recover exactly the classical bounds (McDiarmid’s inequality) when the random variables are independent. Furthermore, in a “large deviations” regime, we obtain the same decay in the probability as for the independent case, even when the random variables display non-trivial dependencies. To show this, we consider a number of applications of interest. First, we provide a bound for Markov chains with finite state space. Then, we consider the Simple Symmetric Random Walk, which is a non-contracting Markov chain, and a non-Markovian setting in which the stochastic process depends on its entire past. To conclude, we propose an application to Markov Chain Monte Carlo methods, where our approach leads to an improved lower bound on the minimum burn-in period required to reach a certain accuracy. 
In all of these settings, we provide a regime of parameters in which our bound fares better than what the state of the art can provide.","lang":"eng"}],"main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2303.07245","open_access":"1"}],"date_published":"2024-06-01T00:00:00Z","user_id":"317138e5-6ab7-11ef-aa6d-ffef3953e345","year":"2024","intvolume":"70","isi":1,"publication":"IEEE Transactions on Information Theory","date_updated":"2025-09-04T13:06:53Z","scopus_import":"1","issue":"6","page":"3823-3839","status":"public"},{"type":"conference","quality_controlled":"1","_id":"14922","abstract":[{"text":"We propose a novel approach to concentration for non-independent random variables. The main idea is to ``pretend'' that the random variables are independent and pay a multiplicative price measuring how far they are from actually being independent. This price is encapsulated in the Hellinger integral between the joint and the product of the marginals, which is then upper bounded leveraging tensorisation properties. Our bounds represent a natural generalisation of concentration inequalities in the presence of dependence: we recover exactly the classical bounds (McDiarmid's inequality) when the random variables are independent. Furthermore, in a ``large deviations'' regime, we obtain the same decay in the probability as for the independent case, even when the random variables display non-trivial dependencies. To show this, we consider a number of applications of interest. First, we provide a bound for Markov chains with finite state space. Then, we consider the Simple Symmetric Random Walk, which is a non-contracting Markov chain, and a non-Markovian setting in which the stochastic process depends on its entire past. To conclude, we propose an application to Markov Chain Monte Carlo methods, where our approach leads to an improved lower bound on the minimum burn-in period required to reach a certain accuracy. 
In all of these settings, we provide a regime of parameters in which our bound fares better than what the state of the art can provide.","lang":"eng"}],"language":[{"iso":"eng"}],"external_id":{"arxiv":["2303.07245"]},"citation":{"chicago":"Esposito, Amedeo Roberto, and Marco Mondelli. “Concentration without Independence via Information Measures.” In <i>Proceedings of 2023 IEEE International Symposium on Information Theory</i>, 400–405. IEEE, 2023. <a href=\"https://doi.org/10.1109/isit54713.2023.10206899\">https://doi.org/10.1109/isit54713.2023.10206899</a>.","ama":"Esposito AR, Mondelli M. Concentration without independence via information measures. In: <i>Proceedings of 2023 IEEE International Symposium on Information Theory</i>. IEEE; 2023:400-405. doi:<a href=\"https://doi.org/10.1109/isit54713.2023.10206899\">10.1109/isit54713.2023.10206899</a>","ista":"Esposito AR, Mondelli M. 2023. Concentration without independence via information measures. Proceedings of 2023 IEEE International Symposium on Information Theory. ISIT: International Symposium on Information Theory, 400–405.","apa":"Esposito, A. R., &#38; Mondelli, M. (2023). Concentration without independence via information measures. In <i>Proceedings of 2023 IEEE International Symposium on Information Theory</i> (pp. 400–405). Taipei, Taiwan: IEEE. <a href=\"https://doi.org/10.1109/isit54713.2023.10206899\">https://doi.org/10.1109/isit54713.2023.10206899</a>","mla":"Esposito, Amedeo Roberto, and Marco Mondelli. “Concentration without Independence via Information Measures.” <i>Proceedings of 2023 IEEE International Symposium on Information Theory</i>, IEEE, 2023, pp. 400–05, doi:<a href=\"https://doi.org/10.1109/isit54713.2023.10206899\">10.1109/isit54713.2023.10206899</a>.","short":"A.R. Esposito, M. Mondelli, in:, Proceedings of 2023 IEEE International Symposium on Information Theory, IEEE, 2023, pp. 400–405.","ieee":"A. R. Esposito and M. 
Mondelli, “Concentration without independence via information measures,” in <i>Proceedings of 2023 IEEE International Symposium on Information Theory</i>, Taipei, Taiwan, 2023, pp. 400–405."},"day":"30","year":"2023","user_id":"2DF688A6-F248-11E8-B48F-1D18A9856A87","main_file_link":[{"url":"https://doi.org/10.48550/arXiv.2303.07245","open_access":"1"}],"date_published":"2023-06-30T00:00:00Z","publication":"Proceedings of 2023 IEEE International Symposium on Information Theory","date_updated":"2025-09-04T13:06:52Z","page":"400-405","status":"public","scopus_import":"1","publication_status":"published","conference":{"name":"ISIT: International Symposium on Information Theory","start_date":"2023-06-25","end_date":"2023-06-30","location":"Taipei, Taiwan"},"publication_identifier":{"eisbn":["9781665475549"],"eissn":["2157-8117"]},"arxiv":1,"oa_version":"Preprint","title":"Concentration without independence via information measures","date_created":"2024-02-02T11:18:40Z","acknowledgement":"The authors are partially supported by the 2019 Lopez-Loreta Prize. They would also like to thank Professor Jan Maas for providing valuable suggestions and comments on an early version of the work.","doi":"10.1109/isit54713.2023.10206899","oa":1,"project":[{"name":"Prix Lopez-Loretta 2019 - Marco Mondelli","_id":"059876FA-7A3F-11EA-A408-12923DDC885E"}],"department":[{"_id":"MaMo"}],"month":"06","author":[{"full_name":"Esposito, Amedeo Roberto","last_name":"Esposito","first_name":"Amedeo Roberto","id":"9583e921-e1ad-11ec-9862-cef099626dc9"},{"id":"27EB676C-8706-11E9-9510-7717E6697425","first_name":"Marco","orcid":"0000-0002-3242-7020","full_name":"Mondelli, Marco","last_name":"Mondelli"}],"publisher":"IEEE","corr_author":"1","article_processing_charge":"No","related_material":{"record":[{"id":"15172","status":"public","relation":"later_version"}]}}]
