""" | |
Multilayer Perceptron | |
""" | |
__authors__ = "Ian Goodfellow" | |
__copyright__ = "Copyright 2012-2013, Universite de Montreal" | |
__credits__ = ["Ian Goodfellow", "David Warde-Farley"] | |
__license__ = "3-clause BSD" | |
__maintainer__ = "Ian Goodfellow" | |
import math |
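
For orientation, this is roughly what the module implements: a stack of affine maps, each followed by an elementwise nonlinearity. A minimal NumPy sketch of a forward pass (names and sizes are illustrative, not pylearn2's API):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def mlp_forward(x, layers):
    # layers is a list of (W, b, activation) triples, applied in order
    for W, b, act in layers:
        x = act(x @ W + b)
    return x

rng = np.random.RandomState(0)
layers = [(0.01 * rng.randn(784, 500), np.zeros(500), sigmoid),
          (0.01 * rng.randn(500, 10), np.zeros(10), lambda z: z)]
y = mlp_forward(rng.randn(2, 784), layers)  # shape (2, 10)
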
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: 'train',
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:pylearn2.models.mlp.MLP {
        batch_size: 128,
        # assumed completion from here on (the preview was cut off), in the
        # style of the standard pylearn2 MLP tutorial:
        layers: [ !obj:pylearn2.models.mlp.Sigmoid {
                      layer_name: 'h0', dim: 500, sparse_init: 15,
                  }, !obj:pylearn2.models.mlp.Softmax {
                      layer_name: 'y', n_classes: 10, irange: 0.
                  } ],
        nvis: 784,
    },
}
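
A config like this is typically run by loading it with pylearn2's YAML parser and starting the resulting Train object's main loop; a minimal sketch, assuming the config is saved as mlp.yaml and also specifies a training algorithm (the preview stops before that part):

from pylearn2.config import yaml_parse

with open('mlp.yaml') as f:
    train = yaml_parse.load(f.read())
train.main_loop()
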
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: 'train',
        one_hot: 1,
        axes: ['c', 0, 1, 'b'],
        start: 0,
        stop: 50000
    },
    model: !obj:pylearn2.models.mlp.MLP {
        batch_size: 128,
        # assumed completion from here on (the preview was cut off); the
        # c01b axes call for a Conv2DSpace input:
        input_space: !obj:pylearn2.space.Conv2DSpace {
            shape: [28, 28], num_channels: 1, axes: ['c', 0, 1, 'b'],
        },
        layers: [ !obj:pylearn2.models.mlp.Softmax {
                      layer_name: 'y', n_classes: 10, irange: 0.
                  } ],
    },
}
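
The only difference from the previous config is axes: ['c', 0, 1, 'b'], the channel-row-column-batch layout used by cuda-convnet style layers (pylearn2's default is ('b', 0, 1, 'c')). Converting between the two layouts is a plain transpose; a small NumPy illustration:

import numpy as np

batch_b01c = np.zeros((128, 28, 28, 1))        # ('b', 0, 1, 'c') layout
batch_c01b = batch_b01c.transpose(3, 1, 2, 0)  # ('c', 0, 1, 'b') layout
print(batch_c01b.shape)                        # (1, 28, 28, 128)
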
from blocks.bricks.recurrent import BaseRecurrent


class ConditionedRecurrent(BaseRecurrent):
    """Wrap a recurrent brick so it can be conditioned on a context."""
    def __init__(self, wrapped, **kwargs):
        super(ConditionedRecurrent, self).__init__(**kwargs)
        self.wrapped = wrapped
        self.children = [wrapped]

    def get_dim(self, name):
        if name == 'context':
            return self.wrapped.get_dim('inputs')
        # assumed completion (the snippet was truncated): defer all other
        # dimension queries to the wrapped brick
        return self.wrapped.get_dim(name)
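
A hedged usage sketch, assuming a standard blocks recurrent brick as the wrapped transition (the full class would also need an apply method that feeds the context in, which the preview does not show):

from blocks.bricks import Tanh
from blocks.bricks.recurrent import SimpleRecurrent

rnn = ConditionedRecurrent(SimpleRecurrent(dim=100, activation=Tanh()))
assert rnn.get_dim('context') == 100  # same as the wrapped brick's input dim
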
# You are supposed to run this after the `Batch Normalization Tutorial`
import theano
import numpy as np
from blocks.filter import VariableFilter
from blocks.roles import OUTPUT

# Collect the pre-activation output of every linear layer in the
# batch-normalized computation graph.
outputs = VariableFilter(
    bricks=mlp.linear_transformations, roles=[OUTPUT])(cg_bn.variables)
f = []
for o, g, b in zip(outputs, gammas, betas):
    # assumed completion (the snippet was truncated): compile a function
    # reporting each layer's batch mean and std given the tutorial's input x
    f.append(theano.function([x], [o.mean(axis=0), o.std(axis=0)]))
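
What those statistics are for: at inference time, batch normalization swaps the per-minibatch mean and standard deviation for fixed population estimates. A minimal NumPy sketch of the normalization that each (gamma, beta) pair parameterizes:

import numpy as np

def batch_norm_inference(x, gamma, beta, pop_mean, pop_std, eps=1e-6):
    # normalize with precomputed population statistics, then scale and shift
    return gamma * (x - pop_mean) / (pop_std + eps) + beta
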
# We are training this DRAW network
# using a model similar to the one
# described in the paper http://arxiv.org/pdf/1502.04623.pdf
#
# Dataset: Binary-MNIST from mila-udem/fuel
from keras.initializations import normal
from seya.layers.draw import DRAW


def myinit(shape):
    return normal(shape, scale=.01)
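
The distinctive piece of the DRAW model cited above is its differentiable read attention: an N x N grid of Gaussian filters placed over the image. A NumPy sketch of one filterbank matrix, following the paper's equations (the parameter values g, delta, sigma are illustrative; the network normally emits them):

import numpy as np

def filterbank(g, delta, sigma, N, size):
    # grid centers: mu_i = g + (i - N/2 - 0.5) * delta
    mu = g + (np.arange(N) - N / 2.0 - 0.5) * delta
    a = np.arange(size)
    F = np.exp(-((a[None, :] - mu[:, None]) ** 2) / (2.0 * sigma ** 2))
    return F / np.maximum(F.sum(axis=1, keepdims=True), 1e-8)  # normalize rows

Fx = filterbank(g=14.0, delta=1.0, sigma=1.0, N=5, size=28)
# reading a 5x5 patch from a 28x28 image: patch = Fy @ img @ Fx.T
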
# NOTE: I'm not sure if this is right
from keras.layers.recurrent import LSTM

class LSTMpeephole(LSTM):
    def __init__(self, **kwargs):
        super(LSTMpeephole, self).__init__(**kwargs)

    def build(self):
        super(LSTMpeephole, self).build()
        # assumed completion (truncated): peephole vectors from cell to gates
        self.P_i = self.init((self.output_dim,))
        self.P_f = self.init((self.output_dim,))
        self.P_o = self.init((self.output_dim,))
        self.params += [self.P_i, self.P_f, self.P_o]
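
For reference, peephole connections (Gers & Schmidhuber) let the gates see the cell state directly: the input and forget gates read c_{t-1} and the output gate reads the updated cell c_t. A NumPy sketch of one time step (all parameter containers are illustrative):

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def peephole_lstm_step(x, h, c, W, U, P, b):
    # W, U, b map gate names ('i', 'f', 'c', 'o') to their parameters;
    # P holds the peephole vectors for the 'i', 'f', and 'o' gates
    i = sigmoid(x @ W['i'] + h @ U['i'] + P['i'] * c + b['i'])
    f = sigmoid(x @ W['f'] + h @ U['f'] + P['f'] * c + b['f'])
    c_new = f * c + i * np.tanh(x @ W['c'] + h @ U['c'] + b['c'])
    o = sigmoid(x @ W['o'] + h @ U['o'] + P['o'] * c_new + b['o'])
    return o * np.tanh(c_new), c_new
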