
Commit

checked in a bunch of stuff I hadn't been tracking
Ian Goodfellow committed Oct 18, 2012
1 parent f32bc18 commit f0bcd99
Showing 99 changed files with 6,150 additions and 0 deletions.
24 changes: 24 additions & 0 deletions datasets/hack_dataset.py
@@ -0,0 +1,24 @@
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
import numpy as np

class HackDataset(DenseDesignMatrix):
    """Dataset that pairs a feature matrix X with labels borrowed from
    another dataset, converts those labels to one-hot, and keeps only
    the example range [start, stop)."""

    def __init__(self, labels_from, X, start, stop):

        super(HackDataset, self).__init__(X=X, y=labels_from.y)

        # Convert the integer labels (which must start at 0) to a one-hot matrix.
        convert_to_one_hot = True
        if convert_to_one_hot:
            if not (self.y.min() == 0):
                raise AssertionError("Expected y.min == 0 but y.min == " + str(self.y.min()))
            nclass = self.y.max() + 1
            y = np.zeros((self.y.shape[0], nclass), dtype='float32')
            for i in xrange(self.y.shape[0]):
                y[i, self.y[i]] = 1.
            self.y = y

        # Restrict both the features and the labels to the requested slice.
        self.X = self.X[start:stop, :]
        assert self.X.shape[0] == stop - start
        self.y = self.y[start:stop, :]
        assert self.y.shape[0] == stop - start
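
For reference, a minimal NumPy-only illustration of the label handling HackDataset performs (one-hot conversion followed by slicing to [start, stop)); the small arrays below are made up purely for the example and are not part of the commit:

import numpy as np

# Made-up stand-ins: integer labels borrowed from another dataset and a
# feature matrix with one row per example.
labels = np.array([0, 2, 1, 2, 0, 1])                 # labels must start at 0
X = np.arange(6 * 4, dtype='float32').reshape(6, 4)

# One-hot conversion, mirroring the loop in HackDataset.__init__.
nclass = labels.max() + 1
y = np.zeros((labels.shape[0], nclass), dtype='float32')
y[np.arange(labels.shape[0]), labels] = 1.

# Keep only examples [start, stop), applied to features and labels alike.
start, stop = 1, 4
X, y = X[start:stop, :], y[start:stop, :]
assert X.shape[0] == stop - start and y.shape[0] == stop - start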

121 changes: 121 additions & 0 deletions dbm/inpaint/dbg.yaml
@@ -0,0 +1,121 @@
# like cifar10_N3 but with both dense and convolutional units
!obj:pylearn2.scripts.train.Train {
    dataset: &data !obj:galatea.datasets.zca_dataset.ZCA_Dataset {
        preprocessed_dataset: !pkl: "/data/lisa/data/cifar10/pylearn2_gcn_whitened/train.pkl",
        preprocessor: !pkl: "/data/lisa/data/cifar10/pylearn2_gcn_whitened/preprocessor.pkl"
    },
    model: !obj:galatea.dbm.inpaint.super_dbm.SuperDBM {
        batch_size: 2, # 50 failed
        niter: 6, # note: since we have to backprop through the whole thing, this does
                  # increase the memory usage
        visible_layer: !obj:galatea.dbm.inpaint.super_dbm.GaussianConvolutionalVisLayer {
            rows: 32,
            cols: 32,
            channels: 3,
            init_beta: 3.7,
            init_mu: 0.
        },
        hidden_layers: [
            !obj:galatea.dbm.inpaint.super_dbm.CompositeLayer {
                layer_name: "h0",
                components: [
                    !obj:galatea.dbm.inpaint.super_dbm.DenseMaxPool {
                        pool_size: 1,
                        detector_layer_dim: 400,
                        irange: 0.02,
                        init_bias: -1,
                        layer_name: 'h0_dense'
                    },
                    !obj:galatea.dbm.inpaint.super_dbm.ConvMaxPool {
                        border_mode: 'full',
                        output_channels: 64,
                        kernel_rows: 9,
                        kernel_cols: 9,
                        pool_rows: 2,
                        pool_cols: 2,
                        irange: 0.05,
                        layer_name: 'h0_conv',
                        init_bias: -5.
                    }
                ],
            },
            !obj:galatea.dbm.inpaint.super_dbm.CompositeLayer {
                layer_name: "h1",
                components: [
                    !obj:galatea.dbm.inpaint.super_dbm.DenseMaxPool {
                        pool_size: 1,
                        detector_layer_dim: 400,
                        irange: 0.02,
                        init_bias: -1,
                        layer_name: 'h1_dense'
                    },
                    !obj:galatea.dbm.inpaint.super_dbm.ConvMaxPool {
                        border_mode: 'full',
                        output_channels: 96,
                        kernel_rows: 5,
                        kernel_cols: 5,
                        pool_rows: 3,
                        pool_cols: 3,
                        irange: 0.3,
                        layer_name: 'h1_conv',
                        init_bias: -4.5
                    }
                ],
                inputs_to_components: { 0: [0], 1: [0, 1] }
            },
            !obj:galatea.dbm.inpaint.super_dbm.CompositeLayer {
                layer_name: "h2",
                components: [
                    !obj:galatea.dbm.inpaint.super_dbm.DenseMaxPool {
                        pool_size: 1,
                        detector_layer_dim: 400,
                        irange: 0.02,
                        init_bias: -1,
                        layer_name: 'h2_dense'
                    },
                    !obj:galatea.dbm.inpaint.super_dbm.ConvMaxPool {
                        border_mode: 'full',
                        output_channels: 128,
                        kernel_rows: 3,
                        kernel_cols: 3,
                        pool_rows: 2,
                        pool_cols: 2,
                        irange: 0.3,
                        layer_name: 'h2_conv',
                        init_bias: -4.
                    }
                ],
                inputs_to_components: { 0: [0], 1: [0, 1] }
            }
        ]
    },
    algorithm: !obj:galatea.dbm.inpaint.inpaint_alg.InpaintAlgorithm {
        batches_per_iter: 10,
        monitoring_batches: 1,
        monitoring_dataset: *data,
        init_alpha: [0.256, 1.28, 2.56, 12.8, 25.6],
        max_iter: 2,
        cost: !obj:galatea.dbm.inpaint.super_inpaint.SuperInpaint {
            both_directions: 1,
            l1_act_targets: [
                [ [.0, .0], [.06, .0] ],
                [ [.0, .0], [.12, .0] ],
                [ [.0, .0], [.16, .0] ]
            ],
            l1_act_coeffs: [
                [ [.0, .0], [1., 0.] ],
                [ [.0, .0], [1., 0.] ],
                [ [.0, .0], [.1, 0.] ]
            ],
            noise: 1
        },
        mask_gen: !obj:galatea.dbm.inpaint.super_inpaint.MaskGen {
            drop_prob: 0.5,
            balance: 0,
            sync_channels: 1
        }
    },
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 1
}
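
For orientation only: a config like this is normally handed to pylearn2's train script (e.g. `python pylearn2/scripts/train.py dbm/inpaint/dbg.yaml`) rather than imported directly. The snippet below is a rough sketch of doing the same thing programmatically; it assumes the galatea and pylearn2 checkouts are on PYTHONPATH, that the CIFAR-10 pickles exist at the hard-coded paths above, and fills in by hand the save-path variable that the train script would normally substitute.

# Sketch: load and run dbg.yaml roughly the way pylearn2's train script would.
from pylearn2.config import yaml_parse

with open('dbm/inpaint/dbg.yaml') as f:
    yaml_src = f.read()

# The train script normally substitutes this variable with the config's
# own file stem; here we fill it in manually.
yaml_src = yaml_src.replace('${PYLEARN2_TRAIN_FILE_FULL_STEM}', 'dbm/inpaint/dbg')

train = yaml_parse.load(yaml_src)  # builds the Train object, SuperDBM model, and InpaintAlgorithm
train.main_loop()                  # runs training, saving to dbg.pkl each epoch (save_freq: 1)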
