transform

old TransE-like models
git clone https://esimon.eu/repos/transform.git

base.py (1791B)


#!/usr/bin/env python2

import numpy
import theano
import theano.tensor as T
import theano.sparse as S

class Base_relation(object):
    """ Base relation class. """

    def __init__(self, rng, number, parameters, tag):
        """ Initialise the parameters.

        Keyword arguments:
        rng -- module used for random number generation
        number -- number of relations
        parameters -- sequence of (name, shape) pairs describing the relation's parameters; their order fixes the argument order passed to apply
        tag -- name prefix used to declare the shared variables
        """

        self.number = number
        self.parameters = []

        for name, shape in parameters:
            # Glorot-style uniform initialisation with bound sqrt(6 / (fan_in + fan_out)).
            dimension = sum(shape)
            bound = numpy.sqrt(6. / dimension)
            values = rng.uniform(low=-bound, high=bound, size=(number,) + shape)
            # Normalise each relation's embedding to unit L2 norm.
            values = values / numpy.sqrt(numpy.sum(values ** 2, axis=1))[:, numpy.newaxis]
            var = theano.shared(name=tag + '.' + name, value=numpy.asarray(values, dtype=theano.config.floatX))
            setattr(self, name, var)
            self.parameters.append(var)

    def lookup(self, relations):
        """ Embed the given relations.

        relations is expected to be a sparse matrix (typically one-hot rows),
        so the sparse dot product selects each relation's parameter rows.
        """
        return map(lambda parameter: S.dot(relations, parameter), self.parameters)

    def transform(self, inputs, relations):
        """ Transform the given inputs w.r.t. the given (embedded) relations. """
        # apply is left to subclasses, which define the actual transformation.
        return self.apply(inputs, *relations)

    def updates(self, cost, learning_rate):
        """ Compute the updates to perform an SGD step w.r.t. a given cost.

        Keyword arguments:
        cost -- The cost to optimise.
        learning_rate -- The learning rate used for gradient descent.
        """
        return [(parameter, parameter - learning_rate * T.grad(cost=cost, wrt=parameter))
                for parameter in self.parameters]
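
Base_relation leaves apply abstract: lookup embeds relation indicators, transform feeds the embedded parameters to apply, and updates yields (shared variable, expression) pairs suitable for theano.function. Below is a minimal sketch of how a subclass might wire these together, assuming a TransE-style translation; the name Translation_relation, the placeholder cost, and the one-hot matrices are illustrative, not part of the repository.

#!/usr/bin/env python2
# Hypothetical sketch -- Translation_relation is not part of this repository.

import numpy
import scipy.sparse
import theano
import theano.tensor as T
import theano.sparse as S

from base import Base_relation

class Translation_relation(Base_relation):
    """ TransE-style relation: translate the input by a learned vector. """

    def __init__(self, rng, number, dimension, tag):
        parameters = [('translation', (dimension,))]
        super(Translation_relation, self).__init__(rng, number, parameters, tag)

    def apply(self, inputs, translation):
        return inputs + translation

rng = numpy.random.RandomState(42)
relation = Translation_relation(rng, number=5, dimension=10, tag='rel')

inputs = T.matrix('inputs')            # batch of entity embeddings
relations = S.csr_matrix('relations')  # sparse one-hot relation indicators

outputs = relation.transform(inputs, relation.lookup(relations))
cost = T.sqr(outputs).sum()  # placeholder cost standing in for a real ranking loss
step = theano.function([inputs, relations], cost,
                       updates=relation.updates(cost, learning_rate=0.01))

x = numpy.ones((3, 10), dtype=theano.config.floatX)
onehot = scipy.sparse.csr_matrix(numpy.eye(5, dtype=theano.config.floatX)[[0, 2, 4]])
print step(x, onehot)

With one-hot rows in relations, the sparse dot in lookup reduces to row selection, so only the embeddings of the relations present in the batch receive a nonzero gradient.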