# -*- coding: utf-8 -*-
"""This module is an evolino implementation alternative to the current
PyBrain one."""
__author__ = 'Justin S Bayer, [email protected]'
import scipy as sp
import scipy.linalg
from pybrain.structure.networks import Module
from pybrain.structure.parametercontainer import ParameterContainer
from pybrain.supervised.trainers import Trainer
from pybrain.optimization import *


class EvolinoModule(Module, ParameterContainer):
    """Class to wrap a module that can be trained by evolino.

    __init__ takes as arguments the innerModule and the desired output
    dimension. Outputs of this module are calculated by multiplying the
    innerModule's output with the module's own parameters, viewed as a
    weight matrix.
    """

    sequential = True

    def __init__(self, innerModule, outdim):
        Module.__init__(self, innerModule.indim, outdim)
        ParameterContainer.__init__(self, outdim * innerModule.outdim)
        self.innerModule = innerModule

    def _forwardImplementation(self, inbuf, outbuf):
        # Grow the buffers if the next offset would run past their end.
        if not (self.offset + 1 < self.inputbuffer.shape[0]):
            self._growBuffers()
        innerout = self.innerModule.activate(inbuf)
        # Temporarily view the flat parameter vector as an
        # (outdim x inner outdim) weight matrix for the linear readout.
        oldshape = self.params.shape
        self.params.shape = self.outdim, self.innerModule.outdim
        outbuf[:] = sp.dot(self.params, innerout)
        self.params.shape = oldshape
        self.offset += 1

    def _backwardImplementation(self, outerr, inerr, outbuf, inbuf):
        raise NotImplementedError("No backward for EvolinoModules.")

    def reset(self):
        Module.reset(self)
        self.innerModule.reset()


class EvolinoTrainer(Trainer):
    """Trainer that evolves the inner module's weights while the linear
    output weights are determined by a least-squares fit."""

    def __init__(self, module, dataset):
        if not isinstance(module, EvolinoModule):
            raise ValueError("Only EvolinoModule objects can be trained by "
                             "the EvolinoTrainer.")
        self.module = module
        self.dataset = dataset
    def _fitness(self, params):
        """Return the sum-of-squares error of a set of parameters for a module."""
        self.module.innerModule.params[:] = params
        # Collect the nonlinear activations just before the linear outputs.
        nonlinears = []
        targets = []
        for seq in self.dataset:
            self.module.innerModule.reset()
            for inpt, target in seq:
                output = self.module.innerModule.activate(inpt)
                nonlinears.append(output)
                targets.append(target)
        # Use the pseudoinverse to get the least-squares weights for the
        # linear output layer.
        A = sp.array(nonlinears)
        y = sp.array(targets)
        x = sp.dot(sp.linalg.pinv2(A), y)
        self.module.params[:] = x.T.flatten()
        # Accumulate the error over all sequences for the fitness value.
        totalerror = 0
        for seq in self.dataset:
            self.module.reset()
            for inpt, target in seq:
                output = self.module.activate(inpt)
                error = output - target
                totalerror += sp.dot(error, error)
        return totalerror
    def train(self):
        # Start the search from small random weights for the inner module.
        params = sp.random.standard_normal(
            sp.size(self.module.innerModule.params)) * 0.01
        # TODO: These settings should be configurable.
        self.learner = CMAES(self._fitness, params)
        self.learner.minimize = True
        self.learner.maxLearningSteps = 100
        self.learner.learn()
        return self.learner.bestEvaluation
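

if __name__ == '__main__':
    # Minimal usage sketch, not part of the original gist: the network layout,
    # layer sizes and the hand-built dataset below are illustrative
    # assumptions, not a tested configuration.
    from pybrain.structure.modules import LSTMLayer
    from pybrain.tools.shortcuts import buildNetwork

    # Recurrent inner network; its (here 3-dimensional) output provides the
    # nonlinear features that the EvolinoModule maps linearly to the targets.
    inner = buildNetwork(1, 4, 3, hiddenclass=LSTMLayer, recurrent=True)
    module = EvolinoModule(inner, 1)

    # The trainer only iterates the dataset as sequences of (input, target)
    # pairs, so a plain list of sequences is enough for this sketch.
    dataset = [
        [(sp.array([0.0]), sp.array([0.0])),
         (sp.array([1.0]), sp.array([1.0]))],
        [(sp.array([1.0]), sp.array([1.0])),
         (sp.array([0.0]), sp.array([0.0]))],
    ]

    trainer = EvolinoTrainer(module, dataset)
    print(trainer.train())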