transform

old TransE-like models
git clone https://esimon.eu/repos/transform.git

commit b1e638bb2a94157b60866c12a0538457b83d6c59
parent 754e753c3b20c82954d5e60d953aa20c01410ab5
Author: Étienne Simon <esimon@esimon.eu>
Date:   Fri, 18 Apr 2014 13:59:36 +0200

Add embeddings normaliser

Diffstat:
M embeddings.py | 4 ++++
M model.py | 2 ++
2 files changed, 6 insertions(+), 0 deletions(-)

diff --git a/embeddings.py b/embeddings.py
@@ -47,3 +47,7 @@ class Embeddings(object):
         learning_rate -- The learning rate used for gradient descent.
         """
         return [(self.E, self.E - learning_rate * T.grad(cost=cost, wrt=self.E))]
+
+    def normalise_updates(self):
+        """ Normalise the embeddings' L2 norm to 1. """
+        return [(self.E, self.E / T.sqrt(T.sum(self.E ** 2, axis=1, keepdims=True)))]
diff --git a/model.py b/model.py
@@ -89,6 +89,7 @@ class Model(object):
         criterion = T.mean(violating_margin*score)
 
         self.train_function = theano.function(inputs=list(inputs), outputs=[criterion], updates=self.updates(criterion))
+        self.normalise_function = theano.function(inputs=[], outputs=[], updates=self.embeddings.normalise_updates())
 
         relation = T.addbroadcast(relation, 0)
         broadcasted_left = T.addbroadcast(positive_left, 0)
@@ -124,6 +125,7 @@ class Model(object):
             for (relation, left_positive, right_positive, left_negative, right_negative) in self.dataset.training_minibatch(batch_size):
                 c1=self.train_function(relation, left_positive, right_positive, left_positive, right_negative)
                 c2=self.train_function(relation, left_positive, right_positive, left_negative, right_positive)
+                self.normalise_function()

     def error(self, name):
         """ Compute the mean rank and top 10 on a given data. """
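
For reference, here is a minimal sketch (not part of this commit) of what the new normalise_updates / normalise_function pair computes: after each minibatch the shared embedding matrix is rescaled so that every row has unit L2 norm. The toy shapes and variable names below are illustrative only.

import numpy
import theano
import theano.tensor as T

# Toy shared embedding matrix: 5 entities, 3 dimensions (illustrative sizes).
rng = numpy.random.RandomState(0)
E = theano.shared(rng.randn(5, 3).astype(theano.config.floatX), name='E')

# Row-wise L2 norm, kept as a column vector (keepdims) so it broadcasts
# over the embedding dimension when dividing.
norms = T.sqrt(T.sum(E ** 2, axis=1, keepdims=True))
normalise = theano.function(inputs=[], outputs=[], updates=[(E, E / norms)])

normalise()
print(numpy.linalg.norm(E.get_value(), axis=1))  # each row now has norm 1

In TransE-like training, this projection keeps entity embeddings on the unit sphere, so the margin-based criterion cannot be trivially reduced by inflating embedding norms.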