@inproceedings{16725d05de0247e1b5f74341002cd92c,
  title     = {Efficient Training of {LDA} on a {GPU} by Mean-for-Mode Estimation},
  author    = {Tristan, Jean-Baptiste and Tassarotti, Joseph and Steele, Guy L.},
  editor    = {Bach, Francis and Blei, David},
  booktitle = {32nd International Conference on Machine Learning, {ICML} 2015},
  publisher = {International Machine Learning Society (IMLS)},
  pages     = {59--68},
  year      = {2015},
  language  = {English (US)},
  abstract  = {We introduce Mean-for-Mode estimation, a variant of an uncollapsed Gibbs sampler that we use to train LDA on a GPU. The algorithm combines benefits of both uncollapsed and collapsed Gibbs samplers. Like a collapsed Gibbs sampler---and unlike an uncollapsed Gibbs sampler---it has good statistical performance, and can use sampling complexity reduction techniques such as sparsity. Meanwhile, like an uncollapsed Gibbs sampler---and unlike a collapsed Gibbs sampler---it is embarrassingly parallel, and can use approximate counters.},
  note      = {32nd International Conference on Machine Learning, ICML 2015 ; Conference date: 06-07-2015 Through 11-07-2015},
}