@article{henter2016minimum,
  title     = {Minimum Entropy Rate Simplification of Stochastic Processes},
  author    = {Henter, Gustav Eje and Kleijn, W. Bastiaan},
  journal   = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
  abstract  = {We propose minimum entropy rate simplification (MERS), an information-theoretic, parameterization-independent framework for simplifying generative models of stochastic processes. Applications include improving model quality for sampling tasks by concentrating the probability mass on the most characteristic and accurately described behaviors while de-emphasizing the tails, and obtaining clean models from corrupted data (nonparametric denoising). This is the opposite of the smoothing step commonly applied to classification models. Drawing on rate-distortion theory, MERS seeks the minimum entropy-rate process under a constraint on the dissimilarity between the original and simplified processes. We particularly investigate the Kullback-Leibler divergence rate as a dissimilarity measure, where, compatible with our assumption that the starting model is disturbed or inaccurate, the simplification rather than the starting model is used for the reference distribution of the divergence. This leads to analytic solutions for stationary and ergodic Gaussian processes and Markov chains. The same formulas are also valid for maximum-entropy smoothing under the same divergence constraint. In experiments, MERS successfully simplifies and denoises models from audio, text, speech, and meteorology.},
  keywords  = {Markov processes, stochastic processes, information theory, signal analysis synthesis and processing, language generation, statistical models},
  month     = dec,
  publisher = {IEEE},
  volume    = {38},
  number    = {12},
  pages     = {2487--2500},
  doi       = {10.1109/TPAMI.2016.2533382},
  year      = {2016}
}