Citation

BibTeX format

@article{Chatzis:2011:10.1109/TNN.2011.2162109,
  author  = {Chatzis, S. P. and Demiris, Y.},
  title   = {Echo State {Gaussian} Process},
  journal = {IEEE Transactions on Neural Networks},
  volume  = {22},
  pages   = {1435--1445},
  year    = {2011},
  issn    = {1045-9227},
  doi     = {10.1109/TNN.2011.2162109},
  url     = {http://dx.doi.org/10.1109/TNN.2011.2162109},
}

RIS format (EndNote, RefMan)

TY  - JOUR
AB  - Echo state networks (ESNs) constitute a novel approach to recurrent neural network (RNN) training, with an RNN (the reservoir) being generated randomly, and only a readout being trained using a simple computationally efficient algorithm. ESNs have greatly facilitated the practical application of RNNs, outperforming classical approaches on a number of benchmark tasks. In this paper, we introduce a novel Bayesian approach toward ESNs, the echo state Gaussian process (ESGP). The ESGP combines the merits of ESNs and Gaussian processes to provide a more robust alternative to conventional reservoir computing networks while also offering a measure of confidence on the generated predictions (in the form of a predictive distribution). We exhibit the merits of our approach in a number of applications, considering both benchmark datasets and real-world applications, where we show that our method offers a significant enhancement in the dynamical data modeling capabilities of ESNs. Additionally, we also show that our method is orders of magnitude more computationally efficient compared to existing Gaussian process-based methods for dynamical data modeling, without compromises in the obtained predictive performance.
AU  - Chatzis, SP
AU  - Demiris, Y
DO  - 10.1109/TNN.2011.2162109
EP  - 1445
PY  - 2011///
SN  - 1045-9227
SP  - 1435
TI  - Echo State Gaussian Process
T2  - IEEE Transactions on Neural Networks
UR  - http://dx.doi.org/10.1109/TNN.2011.2162109
VL  - 22
ER  -