@techreport{a4f96ad99220429ba47f6816acf5ed63,
  author        = {Storkey, Amos},
  title         = {Palimpsest Memories: A New High-Capacity Forgetful Learning Rule for {Hopfield} Networks},
  abstract      = {Palimpsest or forgetful learning rules for attractor neural networks do not suffer from catastrophic forgetting. Instead they selectively forget older memories in order to store new patterns. Standard palimpsest learning algorithms have a capacity of up to 0.05n, where n is the size of the network. Here a new learning rule is introduced. This rule is local and incremental. It is shown that it has palimpsest properties, and it has a palimpsest capacity of about 0.25n, much higher than the capacity of standard palimpsest schemes. It is shown that the algorithm acts as an iterated function sequence on the space of matrices, and this is used to illustrate the performance of the learning rule.},
  year          = {1998},
  type          = {Working Paper},
  language      = {English},
  internal-note = {TODO(review): @techreport requires an institution field -- confirm the issuing institution and add it},
}