
Commit

config
Ian Goodfellow committed Nov 2, 2013
1 parent ae04261 commit 1deab96
Showing 7 changed files with 410 additions and 0 deletions.
77 changes: 77 additions & 0 deletions sample_prop/agent_0.yaml
@@ -0,0 +1,77 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.agent.AgentHive1 {
        input_space: !obj:pylearn2.space.VectorSpace { dim: 784 },
        layers: [
            !obj:galatea.sample_prop.agent.LinearAgents {
                layer_name: 'h0',
                irange: .05,
                #max_col_norm: 10.,
                dim: 500
            },
            !obj:galatea.sample_prop.agent.LinearAgents {
                layer_name: 'h1',
                irange: .05,
                #max_col_norm: 10.,
                dim: 500
            },
            !obj:pylearn2.models.mlp.Softmax {
                layer_name: 'y',
                irange: 0.05,
                n_classes: 10,
                max_col_norm: 10
            }
        ]
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 100,
        set_batch_size: 1,
        learning_rate: .005,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.agent.AgentHive1Cost1 {
            flip_prob: .01,
        },
        termination_criterion: !obj:pylearn2.termination_criteria.MonitorBased {
            channel_name: "valid_y_misclass",
            N: 100,
            prop_decrease: 0.
        }
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .9
        },
        #!obj:pylearn2.training_algorithms.sgd.LinearDecayOverEpoch {
        #    start: 1,
        #    saturate: 527,
        #    decay_factor: 0.006308
        #},
        !obj:pylearn2.train_extensions.best_params.MonitorBasedSaveBest {
            channel_name: "valid_y_misclass",
            save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}_best.pkl"
        },
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 1
}
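
These configs are the usual pylearn2 train-script inputs. As a rough sketch of how a file like agent_0.yaml would be run outside of pylearn2/scripts/train.py (which normally fills in ${PYLEARN2_TRAIN_FILE_FULL_STEM} itself), assuming pylearn2 and the galatea.sample_prop modules referenced above are importable:

    # Sketch: build and run the Train object described by agent_0.yaml.
    # Roughly equivalent to: python pylearn2/scripts/train.py agent_0.yaml
    import os
    from pylearn2.config import yaml_parse

    path = "agent_0.yaml"  # hypothetical location of the config above
    # train.py normally sets this from the config filename so that save paths
    # like "${PYLEARN2_TRAIN_FILE_FULL_STEM}_best.pkl" resolve next to it.
    os.environ["PYLEARN2_TRAIN_FILE_FULL_STEM"] = os.path.splitext(path)[0]

    with open(path) as f:
        train = yaml_parse.load(f.read())  # instantiates dataset, model, SGD
    train.main_loop()  # runs until the MonitorBased criterion stops the loop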

48 changes: 48 additions & 0 deletions sample_prop/sgd_mnist_9r.yaml
@@ -0,0 +1,48 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.basic.SimpleModel {
        nvis: 784,
        num_hid: 500,
        num_class: 10
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 1000,
        set_batch_size: 1,
        learning_rate: 1e-1,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.basic.SamplingCost {
            weight_decay_1: .000001,
            weight_decay_2: .000001
        },
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .99
        },
        !obj:pylearn2.training_algorithms.sgd.OneOverEpoch {
            start: 100,
            half_life: 10
        }
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 1
}
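
The 9r run anneals its 1e-1 learning rate with OneOverEpoch from epoch 100 on. A small sketch of what such a 1/t schedule looks like, under the assumption that half_life is the number of epochs past start at which the rate reaches half its starting value (the function name below is illustrative):

    # Sketch of a OneOverEpoch-style 1/t learning-rate schedule (assumption:
    # the rate is halved `half_life` epochs after `start`).
    def one_over_epoch(lr0, epoch, start=100, half_life=10):
        if epoch < start:
            return lr0
        return lr0 / (1.0 + (epoch - start) / float(half_life))

    for epoch in (0, 100, 110, 200):
        print(epoch, one_over_epoch(1e-1, epoch))
    # epoch 110 -> 0.05 (half of 1e-1); epoch 200 -> ~0.0091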

51 changes: 51 additions & 0 deletions sample_prop/sgd_mnist_Lr.yaml
@@ -0,0 +1,51 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.basic.SimpleModel2 {
        nvis: 784,
        num_hid: 500,
        num_hid_2: 500,
        num_class: 10
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 100,
        set_batch_size: 1,
        learning_rate: 5e-2,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.basic.SamplingCost3 {
        },
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .9
        },
        #!obj:pylearn2.training_algorithms.sgd.OneOverEpoch {
        #    start: 100,
        #    half_life: 5
        #},
        !obj:pylearn2.training_algorithms.sgd.PolyakAveraging {
            start: 100
        }
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 100
}
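
The Lr run switches on PolyakAveraging at epoch 100, which maintains a running average of the parameters alongside the SGD iterates. A minimal illustration of the idea (not pylearn2's implementation; the class name is made up):

    import numpy as np

    # Running (Polyak) average of parameters: the averaged copy is typically
    # smoother than the last SGD iterate and is what gets evaluated/saved.
    class RunningParamAverage(object):
        def __init__(self, params):
            self.mean = [np.array(p, dtype=float) for p in params]
            self.count = 1

        def update(self, params):
            self.count += 1
            for m, p in zip(self.mean, params):
                m += (p - m) / self.count  # incremental mean update

    w = np.zeros(3)
    avg = RunningParamAverage([w])
    for step in range(1, 6):
        w = w + 1.0           # stand-in for one SGD update
        avg.update([w])
    print(avg.mean[0])        # [2.5 2.5 2.5], the mean of the 6 iterates seen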

49 changes: 49 additions & 0 deletions sample_prop/sgd_mnist_new_0.yaml
@@ -0,0 +1,49 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.basic.SimpleModel2 {
        nvis: 784,
        num_hid: 500,
        num_hid_2: 500,
        num_class: 10,
        y_max_col_norm: 10
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 100,
        set_batch_size: 1,
        learning_rate: 5e-2,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.basic.SamplingCost3 {
        },
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .9
        },
        #!obj:pylearn2.training_algorithms.sgd.OneOverEpoch {
        #    start: 100,
        #    half_life: 5
        #},
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 100
}

60 changes: 60 additions & 0 deletions sample_prop/sgd_mnist_new_1.yaml
@@ -0,0 +1,60 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.basic.SimpleModel2 {
        nvis: 784,
        num_hid: 500,
        num_hid_2: 500,
        num_class: 10,
        y_max_col_norm: 10
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 100,
        set_batch_size: 1,
        learning_rate: 5e-2,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.basic.SamplingCost3 {
        },
        termination_criterion: !obj:pylearn2.termination_criteria.MonitorBased {
            channel_name: "valid_y_misclass",
            N: 100,
            prop_decrease: 0.
        }
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .9
        },
        !obj:pylearn2.training_algorithms.sgd.LinearDecayOverEpoch {
            start: 1,
            saturate: 527,
            decay_factor: 0.006308
        },
        !obj:pylearn2.train_extensions.best_params.MonitorBasedSaveBest {
            channel_name: "valid_y_misclass",
            save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}_best.pkl"
        },
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 100
}
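
new_1 is the variant that keeps LinearDecayOverEpoch enabled. Assuming decay_factor is the multiplier the learning rate has been scaled by once the schedule saturates (a linear ramp from 1 at epoch 1 to 0.006308 at epoch 527), the effective rate falls from 5e-2 to about 3.2e-4. A small sketch of that ramp:

    # Sketch of a LinearDecayOverEpoch-style ramp (assumption: the scale on the
    # learning rate goes linearly from 1 at `start` to `decay_factor` at
    # `saturate`, then stays there).
    def linear_decay(lr0, epoch, start=1, saturate=527, decay_factor=0.006308):
        if epoch <= start:
            scale = 1.0
        elif epoch >= saturate:
            scale = decay_factor
        else:
            frac = (epoch - start) / float(saturate - start)
            scale = 1.0 + frac * (decay_factor - 1.0)
        return lr0 * scale

    print(linear_decay(5e-2, 1))     # 0.05
    print(linear_decay(5e-2, 264))   # ~0.025, roughly half-way down the ramp
    print(linear_decay(5e-2, 527))   # ~3.15e-4 = 5e-2 * 0.006308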

62 changes: 62 additions & 0 deletions sample_prop/sgd_mnist_new_2.yaml
@@ -0,0 +1,62 @@
!obj:pylearn2.train.Train {
    dataset: &train !obj:pylearn2.datasets.mnist.MNIST {
        which_set: "train",
        binarize: 1,
        one_hot: 1,
        start: 0,
        stop: 50000
    },
    model: !obj:galatea.sample_prop.basic.SimpleModel2 {
        nvis: 784,
        num_hid: 500,
        num_hid_2: 500,
        num_class: 10,
        #h0_max_col_norm: 3.,
        #h1_max_col_norm: 2.,
        y_max_col_norm: 10
    },
    algorithm: !obj:pylearn2.training_algorithms.sgd.SGD {
        batch_size: 100,
        set_batch_size: 1,
        learning_rate: .05,
        init_momentum: .5,
        monitoring_dataset: {
            'train': *train,
            'valid': !obj:pylearn2.datasets.mnist.MNIST {
                which_set: "train",
                binarize: 1,
                one_hot: 1,
                start: 50000,
                stop: 60000
            }
        },
        cost: !obj:galatea.sample_prop.basic.SamplingCost3 {
        },
        termination_criterion: !obj:pylearn2.termination_criteria.MonitorBased {
            channel_name: "valid_y_misclass",
            N: 100,
            prop_decrease: 0.
        }
    },
    extensions: [
        !obj:pylearn2.training_algorithms.sgd.MomentumAdjustor {
            start: 0,
            saturate: 200,
            final_momentum: .9
        },
        #!obj:pylearn2.training_algorithms.sgd.LinearDecayOverEpoch {
        #    start: 1,
        #    saturate: 527,
        #    decay_factor: 0.006308
        #},
        !obj:pylearn2.train_extensions.best_params.MonitorBasedSaveBest {
            channel_name: "valid_y_misclass",
            save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}_best.pkl"
        },
    ],
    save_path: "${PYLEARN2_TRAIN_FILE_FULL_STEM}.pkl",
    save_freq: 100
}
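
The new_1 and new_2 runs both stop on the valid_y_misclass channel via MonitorBased with N: 100 and prop_decrease: 0., i.e. training halts once validation misclassification has gone 100 epochs without improving, while MonitorBasedSaveBest keeps the parameters from the best epoch. A conceptual sketch of that stopping rule (illustrative, not pylearn2's code):

    # Sketch of a MonitorBased stopping rule: keep training only while the
    # monitored channel has improved (by more than prop_decrease) within the
    # last N recorded epochs.
    def should_continue(history, N=100, prop_decrease=0.0):
        if len(history) <= N:
            return True
        best_before = min(history[:-N])
        best_recent = min(history[-N:])
        return best_recent < best_before * (1.0 - prop_decrease)

    valid_misclass = [0.08, 0.05, 0.04] + [0.04] * 100  # flat for 100 epochs
    print(should_continue(valid_misclass))  # False -> this run would stop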

