@inproceedings{ghosh2020robust,
  title     = {Robust classification using hidden {M}arkov models and mixtures of normalizing flows},
  author    = {Ghosh, Anubhab and Honor{\'e}, Antoine and Liu, Dong and Henter, Gustav Eje and Chatterjee, Saikat},
  booktitle = {Proc. MLSP},
  abstract  = {We test the robustness of a maximum-likelihood (ML) based classifier when sequential observation data are corrupted by noise. The hypothesis is that a generative model that combines the state transitions of a hidden Markov model (HMM) with neural-network-based probability distributions for the hidden states of the HMM can provide robust classification performance. The combined model is called the normalizing-flow mixture model based HMM (NMM-HMM). It can be trained using a combination of expectation-maximization (EM) and backpropagation. We verify the improved robustness of NMM-HMM classifiers in an application to speech recognition.},
  keywords  = {speech recognition, generative models, hidden Markov models, neural networks},
  address   = {Espoo, Finland},
  month     = {Sept.},
  publisher = {IEEE},
  volume    = {30},
  pages     = {1--6},
  doi       = {10.1109/MLSP49062.2020.9231775},
  year      = {2020}
}