@inproceedings{7606,
  author    = {Hledik, Michal and Sokolowski, Thomas R. and Tkačik, Gašper},
  title     = {A Tight Upper Bound on Mutual Information},
  booktitle = {{IEEE} Information Theory Workshop, {ITW} 2019},
  publisher = {IEEE},
  location  = {Visby, Sweden},
  year      = {2019},
  isbn      = {9781538669006},
  doi       = {10.1109/ITW44776.2019.8989292},
  abstract  = {We derive a tight lower bound on equivocation (conditional entropy), or equivalently a tight upper bound on mutual information between a signal variable and channel outputs. The bound is in terms of the joint distribution of the signals and maximum a posteriori decodes (most probable signals given channel output). As part of our derivation, we describe the key properties of the distribution of signals, channel outputs and decodes, that minimizes equivocation and maximizes mutual information. This work addresses a problem in data analysis, where mutual information between signals and decodes is sometimes used to lower bound the mutual information between signals and channel outputs. Our result provides a corresponding upper bound.},
}