@inproceedings{199cabbad38a44c9a02b8f9812dd304c,
title = "Transformation Equivariant Boltzmann Machines",
abstract = "We develop a novel modeling framework for Boltzmann machines, augmenting each hidden unit with a latent transformation assignment variable which describes the selection of the transformed view of the canonical connection weights associated with the unit. This enables the inferences of the model to transform in response to transformed input data in a stable and predictable way, and avoids learning multiple features differing only with respect to the set of transformations. Extending prior work on translation equivariant (convolutional) models, we develop translation and rotation equivariant restricted Boltzmann machines (RBMs) and deep belief nets (DBNs), and demonstrate their effectiveness in learning frequently occurring statistical structure from artificial and natural images.",
keywords = "steerable filters, image modeling, Boltzmann machines, transformation invariance, transformation equivariant representations, convolutional structures",
author = "Kivinen, {Jyri J.} and Williams, {Christopher K. I.}",
year = "2011",
doi = "10.1007/978-3-642-21735-7_1",
language = "English",
isbn = "978-3-642-21734-0",
series = "Lecture Notes in Computer Science",
publisher = "Springer-Verlag GmbH",
pages = "1--9",
editor = "Timo Honkela and W{\l}odzis{\l}aw Duch and Mark Girolami and Samuel Kaski",
booktitle = "Artificial Neural Networks and Machine Learning - ICANN 2011",
}