@inproceedings{aceea7bb6312431794d6fcd05507b2ea,
  author    = {Kuznetsov, Vitaly and Mohri, Mehryar},
  title     = {Generalization Bounds for Time Series Prediction with Non-Stationary Processes},
  booktitle = {Algorithmic Learning Theory -- 25th International Conference, {ALT} 2014, Proceedings},
  editor    = {Auer, Peter and Clark, Alexander and Zeugmann, Thomas and Zilles, Sandra},
  series    = {Lecture Notes in Computer Science},
  volume    = {8776},
  publisher = {Springer},
  year      = {2014},
  pages     = {260--274},
  doi       = {10.1007/978-3-319-11662-4_19},
  language  = {English (US)},
  keywords  = {Fast rates, Generalization bounds, Local Rademacher complexity, Mixing, Stationary processes, Time series},
  abstract  = {This paper presents the first generalization bounds for time series prediction with a non-stationary mixing stochastic process. We prove Rademacher complexity learning bounds for both average-path generalization with non-stationary {$\beta$}-mixing processes and path-dependent generalization with non-stationary {$\varphi$}-mixing processes. Our guarantees are expressed in terms of {$\beta$}- or {$\varphi$}-mixing coefficients and a natural measure of discrepancy between training and target distributions. They admit as special cases previous Rademacher complexity bounds for non-i.i.d. stationary distributions, for independent but not identically distributed random variables, or for the i.i.d. case. We show that, using a new sub-sample selection technique we introduce, our bounds can be tightened under the natural assumption of convergent stochastic processes. We also prove that fast learning rates can be achieved by extending existing local Rademacher complexity analysis to non-i.i.d. setting.},
  note      = {Publisher Copyright: {\textcopyright} Springer International Publishing Switzerland 2014.; 25th International Conference on Algorithmic Learning Theory, ALT 2014 ; Conference date: 08-10-2014 Through 10-10-2014},
}