@inproceedings{e3214560e35c4a519745a3aeed020e84,
title = "On Transductive Regression",
abstract = "In many modern large-scale learning applications, the amount of unlabeled data far exceeds that of labeled data. A common instance of this problem is the transductive setting where the unlabeled test points are known to the learning algorithm. This paper presents a study of regression problems in that setting. It presents explicit VC-dimension error bounds for transductive regression that hold for all bounded loss functions and coincide with the tight classification bounds of Vapnik when applied to classification. It also presents a new transductive regression algorithm inspired by our bound that admits a primal and kernelized closed-form solution and deals efficiently with large amounts of unlabeled data. The algorithm exploits the position of unlabeled points to locally estimate their labels and then uses a global optimization to ensure robust predictions. Our study also includes the results of experiments with several publicly available regression data sets with up to 20,000 unlabeled examples. The comparison with other transductive regression algorithms shows that it performs well and that it can scale to large data sets.",
author = "Corinna Cortes and Mehryar Mohri",
note = "Publisher Copyright: {\textcopyright} NIPS 2006.All rights reserved; 19th International Conference on Neural Information Processing Systems, NIPS 2006 ; Conference date: 04-12-2006 Through 07-12-2006",
year = "2006",
language = "English (US)",
series = "NIPS 2006: Proceedings of the 19th International Conference on Neural Information Processing Systems",
publisher = "MIT Press Journals",
pages = "305--312",
editor = "Bernhard Scholkopf and Platt, {John C.} and Thomas Hofmann",
booktitle = "NIPS 2006",
}
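
The abstract describes a two-stage method: locally estimate labels for the known unlabeled test points, then run a global optimization that admits a closed-form primal solution. The sketch below only illustrates that shape under stated assumptions; the k-nearest-neighbor local estimator, the ridge-style objective, and the parameters k, C, C_prime, and lam are illustrative choices based on the abstract alone, not the paper's exact algorithm.

# Hedged sketch of a transductive regression pipeline in the spirit of the
# abstract above: locally estimate labels for the unlabeled test points, then
# fit one global regressor by regularized least squares (closed form in the
# primal). All hyperparameters and the ridge formulation are assumptions.
import numpy as np

def local_label_estimates(X_lab, y_lab, X_unl, k=5):
    """Estimate a label for each unlabeled point as the mean label of its
    k nearest labeled neighbors (one simple choice of local estimator)."""
    estimates = np.empty(len(X_unl))
    for i, x in enumerate(X_unl):
        dists = np.linalg.norm(X_lab - x, axis=1)
        nearest = np.argsort(dists)[:k]
        estimates[i] = y_lab[nearest].mean()
    return estimates

def fit_transductive_ridge(X_lab, y_lab, X_unl, y_tilde, C=1.0, C_prime=0.1, lam=1.0):
    """Solve min_w lam*||w||^2 + C*||X_lab w - y_lab||^2 + C_prime*||X_unl w - y_tilde||^2,
    a weighted ridge regression whose primal solution is available in closed form."""
    d = X_lab.shape[1]
    A = lam * np.eye(d) + C * X_lab.T @ X_lab + C_prime * X_unl.T @ X_unl
    b = C * X_lab.T @ y_lab + C_prime * X_unl.T @ y_tilde
    return np.linalg.solve(A, b)

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X_lab = rng.normal(size=(50, 3))
    y_lab = X_lab @ np.array([1.0, -2.0, 0.5]) + 0.1 * rng.normal(size=50)
    X_unl = rng.normal(size=(200, 3))          # unlabeled test points, known in advance
    y_tilde = local_label_estimates(X_lab, y_lab, X_unl, k=5)
    w = fit_transductive_ridge(X_lab, y_lab, X_unl, y_tilde)
    print("predictions on unlabeled points:", (X_unl @ w)[:5])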