@inproceedings{570552d4fca942f5901d1aa6fe7c75c5,
title = "Double backpropagation increasing generalization performance",
abstract = "One test of a new training algorithm is how well the algorithm generalizes from the training data to the test data. It is shown that a new training algorithm termed double backpropagation improves generalization by simultaneously minimizing the normal energy term found in backpropagation and an additional energy term that is related to the sum of the squares of the input derivatives (gradients). In normal backpropagation training, minimizing the energy function tends to push the input gradient to zero. However, this is not always possible. Double backpropagation explicitly pushes the input gradients to zero, making the minimum broader, and increases the generalization on the test data. The authors show the improvement over normal backpropagation on four candidate architectures and a training set of 320 handwritten numbers and a test set of size 180.",
author = "Harris Drucker and {Le Cun}, Yann",
year = "1992",
language = "English (US)",
isbn = "0780301641",
series = "Proceedings. IJCNN - International Joint Conference on Neural Networks",
publisher = "Publ by IEEE",
pages = "145--150",
editor = "Anon",
booktitle = "Proceedings. IJCNN - International Joint Conference on Neural Networks",
note = "International Joint Conference on Neural Networks - IJCNN-91-Seattle ; Conference date: 08-07-1991 Through 12-07-1991",
}
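
A minimal sketch of the idea described in the abstract follows, kept outside the entry, where BibTeX ignores free text. It is an illustrative reconstruction, not the authors' code: the one-layer tanh network, the squared-error energy, and the penalty weight eta are assumptions made here for concreteness. The paper's objective augments the usual backpropagation energy with the sum of squared input derivatives of that energy, so the training gradient must differentiate through a gradient, hence "double" backpropagation.

import jax
import jax.numpy as jnp

def energy(params, x, y):
    # Ordinary backprop energy: squared error of a one-layer tanh net
    # (the architecture here is illustrative, not the paper's).
    W, b = params
    pred = jnp.tanh(W @ x + b)
    return jnp.sum((pred - y) ** 2)

def double_backprop_loss(params, x, y, eta=0.1):
    # E plus eta * sum of squared input derivatives of E; the extra term
    # explicitly pushes the input gradients toward zero, broadening the minimum.
    e = energy(params, x, y)
    g = jax.grad(energy, argnums=1)(params, x, y)  # dE/dx, the input gradient
    return e + eta * jnp.sum(g ** 2)

# Differentiating the combined loss w.r.t. the parameters backpropagates
# through the inner gradient, i.e. it backpropagates twice.
loss_grad = jax.grad(double_backprop_loss)

# Example call with hypothetical shapes (5 inputs, 3 outputs):
key = jax.random.PRNGKey(0)
W = jax.random.normal(key, (3, 5))
b = jnp.zeros(3)
grads = loss_grad((W, b), jnp.ones(5), jnp.zeros(3))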