Time series with non-uniform intervals occur in many applications, and are
difficult to model using standard recurrent neural networks (RNNs). We
generalize RNNs to have continuous-time hidden dynamics defined by ordinary
differential equations (ODEs), a model we call ODE-RNNs. Furthermore, we use
ODE-RNNs to replace the recognition network of the recently-proposed Latent ODE
model. Both ODE-RNNs and Latent ODEs can naturally handle arbitrary time gaps
between observations, and can explicitly model the probability of observation
times using Poisson processes. We show experimentally that these ODE-based
models outperform their RNN-based counterparts on irregularly-sampled data.
Description
[1907.03907] Latent ODEs for Irregularly-Sampled Time Series
%0 Generic
%1 rubanova2019latent
%A Rubanova, Yulia
%A Chen, Ricky T. Q.
%A Duvenaud, David
%D 2019
%K from:adulny neural-ode time-series
%T Latent ODEs for Irregularly-Sampled Time Series
%U http://arxiv.org/abs/1907.03907
%X Time series with non-uniform intervals occur in many applications, and are
difficult to model using standard recurrent neural networks (RNNs). We
generalize RNNs to have continuous-time hidden dynamics defined by ordinary
differential equations (ODEs), a model we call ODE-RNNs. Furthermore, we use
ODE-RNNs to replace the recognition network of the recently-proposed Latent ODE
model. Both ODE-RNNs and Latent ODEs can naturally handle arbitrary time gaps
between observations, and can explicitly model the probability of observation
times using Poisson processes. We show experimentally that these ODE-based
models outperform their RNN-based counterparts on irregularly-sampled data.
% arXiv preprint (NeurIPS-era working paper; no journal data available here, so
% @misc is the correct classic-BibTeX type). The arXiv identifier is moved from
% the free-form `note` into the standard eprint fields so eprint-aware styles
% render "arXiv:1907.03907" automatically. "ODEs" is brace-protected in the
% title so sentence-casing styles do not downcase the acronym.
@misc{rubanova2019latent,
  author        = {Rubanova, Yulia and Chen, Ricky T. Q. and Duvenaud, David},
  title         = {Latent {ODEs} for Irregularly-Sampled Time Series},
  year          = {2019},
  eprint        = {1907.03907},
  archiveprefix = {arXiv},
  url           = {http://arxiv.org/abs/1907.03907},
  abstract      = {Time series with non-uniform intervals occur in many applications, and are
                   difficult to model using standard recurrent neural networks (RNNs). We
                   generalize RNNs to have continuous-time hidden dynamics defined by ordinary
                   differential equations (ODEs), a model we call ODE-RNNs. Furthermore, we use
                   ODE-RNNs to replace the recognition network of the recently-proposed Latent ODE
                   model. Both ODE-RNNs and Latent ODEs can naturally handle arbitrary time gaps
                   between observations, and can explicitly model the probability of observation
                   times using Poisson processes. We show experimentally that these ODE-based
                   models outperform their RNN-based counterparts on irregularly-sampled data.},
  keywords      = {from:adulny neural-ode time-series},
  description   = {[1907.03907] Latent ODEs for Irregularly-Sampled Time Series},
  added-at      = {2021-03-26T11:57:54.000+0100},
  timestamp     = {2021-03-26T11:57:54.000+0100},
  biburl        = {https://www.bibsonomy.org/bibtex/29e45ef7da6d425aabca7d6748e7f860b/adulny},
  interhash     = {ffede83aeb26698e8253dc1d9471afc4},
  intrahash     = {9e45ef7da6d425aabca7d6748e7f860b}
}