@inproceedings{cd1528ba4eee454c998226e6cf9da92a,
title = "Constraining the dynamics of deep probabilistic models",
abstract = "We introduce a novel generative formulation of deep probabilistic models implementing {"}soft{"} constraints on their function dynamics. In particular, we develop a flexible methodological framework where the modeled functions and derivatives of a given order are subject to inequality or equality constraints. We then characterize the posterior distribution over model and constraint parameters through stochastic variational inference. As a result, the proposed approach allows for accurate and scalable uncertainty quantification on the predictions and on all parameters. We demonstrate the application of equality constraints in the challenging problem of parameter inference in ordinary differential equation models, while we showcase the application of inequality constraints on the problem of monotonic regression of count data. The proposed approach is extensively tested in several experimental settings, leading to highly competitive results in challenging modeling applications, while offering high expressiveness, flexi-bility and scalability.",
author = "Marco Lorenzi and Maurizio Filippone",
note = "Publisher Copyright: {\textcopyright} Copyright 2018 by the author(s).; 35th International Conference on Machine Learning, ICML 2018 ; Conference date: 10-07-2018 Through 15-07-2018",
year = "2018",
language = "English (US)",
series = "35th International Conference on Machine Learning, ICML 2018",
publisher = "International Machine Learning Society (IMLS)",
pages = "5089--5101",
editor = "Jennifer Dy and Andreas Krause",
booktitle = "35th International Conference on Machine Learning, ICML 2018",
}