Citation

BibTeX format

@article{Chatzis:2012:10.1109/TNNLS.2012.2217986,
author = {Chatzis, SP and Demiris, Y},
doi = {10.1109/TNNLS.2012.2217986},
journal = {IEEE Transactions on Neural Networks and Learning Systems},
pages = {1862--1871},
title = {Nonparametric mixtures of Gaussian processes with power-law behavior},
url = {http://dx.doi.org/10.1109/TNNLS.2012.2217986},
volume = {23},
year = {2012}
}

RIS format (EndNote, RefMan)

TY  - JOUR
AB  - Gaussian processes (GPs) constitute one of the most important Bayesian machine learning approaches, based on a particularly effective method for placing a prior distribution over the space of regression functions. Several researchers have considered postulating mixtures of Gaussian processes as a means of dealing with non-stationary covariance functions, discontinuities, multi-modality, and overlapping output signals. In existing works, mixtures of Gaussian processes are based on the introduction of a gating function defined over the space of model input variables. This way, each postulated mixture component Gaussian process is effectively restricted to a limited subset of the input space. In this work, we follow a different approach: We consider a fully generative nonparametric Bayesian model with power-law behavior, generating Gaussian processes over the whole input space of the learned task. We provide an efficient algorithm for model inference, based on the variational Bayesian framework, and exhibit its efficacy using benchmark and real-world datasets.
AU  - Chatzis, SP
AU  - Demiris, Y
DO  - 10.1109/TNNLS.2012.2217986
EP  - 1871
PY  - 2012///
SN  - 2162-237X
SP  - 1862
TI  - Nonparametric mixtures of Gaussian processes with power-law behavior
T2  - IEEE Transactions on Neural Networks and Learning Systems
UR  - http://dx.doi.org/10.1109/TNNLS.2012.2217986
VL  - 23
ER  -
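
Illustrative sketch (Python)

The abstract describes the model class only in prose, so the following is a minimal generative sketch of a mixture of Gaussian processes with power-law component weights. It assumes a Pitman-Yor stick-breaking prior (the standard nonparametric construction whose positive discount yields power-law component sizes) and a squared-exponential covariance; the function names, parameter values, and truncation level are illustrative choices, and the sketch covers only the generative model, not the paper's variational Bayesian inference algorithm.

import numpy as np

rng = np.random.default_rng(0)

def rbf_kernel(x, lengthscale=0.5, variance=1.0):
    # Squared-exponential covariance matrix for 1-D inputs x.
    diff = x[:, None] - x[None, :]
    return variance * np.exp(-0.5 * (diff / lengthscale) ** 2)

def pitman_yor_weights(alpha=1.0, discount=0.5, truncation=20):
    # Truncated Pitman-Yor stick-breaking: v_k ~ Beta(1 - d, alpha + k d).
    # A positive discount d gives power-law behavior in the component sizes.
    v = rng.beta(1.0 - discount, alpha + discount * np.arange(1, truncation + 1))
    sticks = np.concatenate(([1.0], np.cumprod(1.0 - v[:-1])))
    return v * sticks

# Inputs span the whole domain; unlike gating-based mixtures, every
# component GP below is defined over all of it.
x = np.linspace(0.0, 1.0, 200)
cov = rbf_kernel(x) + 1e-8 * np.eye(x.size)  # jitter for numerical stability
chol = np.linalg.cholesky(cov)

weights = pitman_yor_weights()
n_components = weights.size

# One latent function per component, each drawn from the same GP prior.
functions = chol @ rng.standard_normal((x.size, n_components))

# Each observation picks a component by the power-law weights, then adds noise.
assignments = rng.choice(n_components, size=x.size, p=weights / weights.sum())
y = functions[np.arange(x.size), assignments] + 0.05 * rng.standard_normal(x.size)

Setting the discount to zero recovers an ordinary Dirichlet process mixture; a positive discount thickens the tail of the component-size distribution, which is the power-law behavior the title refers to.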