
Commit

solved all the conflicts
Hatef Monajemi committed May 23, 2018
2 parents 2929f32 + 3c1b5c5 commit 5c28eac
Showing 58 changed files with 8,108 additions and 1,337 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -0,0 +1,4 @@
.DS_Store
CJlog
/src/external/firebase/t
/example/Python/pytorch/mnist/data
1 change: 1 addition & 0 deletions .ssh_config.md5
@@ -0,0 +1 @@
MD5 (/Users/hatef/github_projects/CJ/clusterjob/ssh_config) = 2b4285a9e9bbc85dec08872803880058
4 changes: 2 additions & 2 deletions README.md
@@ -2,9 +2,9 @@ Clusterjob, hereafter CJ, is an automation software system, written mainly in pe
computational jobs to clusters in a hassle-free and reproducible manner.
CJ produces 'reproducible' computational packages for academic
publications at no cost. The CJ project started in 2013 at Stanford University by Hatef Monajemi and his PhD advisor David L. Donoho with the goal of encouraging a more efficient and reproducible research paradigm.
CJ is currently under development. Current implementation allows submission of MATLAB jobs.
CJ is currently under development. Current implementation allows submission of MATLAB and Python jobs.
In future versions, we hope to include other heavily used programming languages
such as Python and R.
such as R and Julia.

You can read more about CJ on http://clusterjob.org
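For context, the dev-cj helper scripts added in this commit show what a typical CJ submission looks like. A minimal sketch, run from example/MATLAB and reusing the same cluster alias (sherlock) and flag values as those helpers:

# Submit a MATLAB script to the sherlock cluster, shipping the current directory as a dependency
perl ../../src/CJ.pl run myMatlabJob.m sherlock -dep ./ -m "test of CJ run" -mem "2G"

# Parallel variant of the same submission
perl ../../src/CJ.pl parrun simpleExample.m sherlock -dep ./ -m "test of CJ simple parrun" -mem "2G"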

3 changes: 2 additions & 1 deletion cj_config
@@ -1,4 +1,5 @@
CJID moosh
CJKEY eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJhZG1pbiI6ZmFsc2UsImRlYnVnIjpmYWxzZSwiZCI6eyJ1aWQiOiJtb29zaCIsImNqcGFzc2NvZGUiOiIyYjM0ZTBiZTM2MDljYzE4MzQwMDg4MGZiNTEwMzhlNiJ9LCJ2IjowLCJpYXQiOjAsImV4cCI6MjE0NzQ4MzY0N30.kPWllSAJTLeZAYlkNW81JQaiKip2onnAxaL__z7FKRg
SYNC_TYPE manual
SYNC_INTERVAL 300
SYNC_INTERVAL 300

1 change: 1 addition & 0 deletions dep.pl
@@ -0,0 +1 @@
sudo cpan -i Data::Dumper Data::UUID FindBin File::chdir File::Basename File::Spec IO::Socket::INET IO::Socket::SSL Getopt::Declare Term::ReadLine JSON::PP JSON::XS Digest::SHA Time::Local Time::Piece Moo HTTP::Thin HTTP::Request::Common JSON URI
Empty file added example/MATLAB/.hatef
Empty file.
14 changes: 14 additions & 0 deletions example/MATLAB/dev-cj
@@ -0,0 +1,14 @@
#!/bin/bash
if [ "$1" == "run" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl run myMatlabJob.m sherlock -dep ./ -m "test of CJ run" -mem "2G"
elif [ "$1" == "rrun" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl rrun simpleExample.m sherlock -dep ./ -m "test of CJ simple rrun" -mem "2G"
elif [ "$1" == "parrun" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl parrun simpleExample.m sherlock -dep ./ -m "test of CJ simple parrun" -mem "2G"
elif [ "$1" == "parrun" ] && [ "$2" == "matlab" ];then
perl ../../src/CJ.pl parrun myMatlabJob.m sherlock -dep ./ -m "test of CJ parrun -- invoking Matlab for range" -mem "2G"
elif [ "$1" == "reduce" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl reduce Results.txt
else
perl ../../src/CJ.pl "$@"
fi
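The script dispatches on its first two arguments and passes anything else straight through to CJ.pl, so usage would look roughly like this (a sketch; it assumes the file has been made executable with chmod +x dev-cj):

./dev-cj run default      # submit myMatlabJob.m to sherlock
./dev-cj parrun matlab    # parrun myMatlabJob.m, with MATLAB evaluating the loop ranges
./dev-cj reduce default   # reduce Results.txt produced by the parallel runs
# any other argument combination falls through to: perl ../../src/CJ.pl "$@"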
11 changes: 7 additions & 4 deletions example/MATLAB/example_reduce.m
@@ -7,13 +7,16 @@
clc



L = [1/8,2/8,3/8,4,5, 7e-1/8]

% Always initiate your outputs
output.myStructCell = cell(5,5);
output.myCharCell = cell(5,5);
output.myMatrix = zeros(5,5);
% otherwise reduce will not work

output.myStructCell = cell(6,5);
output.myCharCell = cell(6,5);
output.myMatrix = zeros(6,5);

L = [1/8,2/8,3/8,4,5, 7e-1/8]

for i = 1:length(L)
for j = 1:5
26 changes: 13 additions & 13 deletions example/MATLAB/myMatlabJob.m
@@ -15,19 +15,19 @@
fprintf(fid, '%s, %s, %s, %s\n','SUID','counter1', 'counter2','random_number');

for i = 1:length(l)
for j = 1:length(k)
counter = (i-1)*length(k) + j ;
% open a file for testing gather
file2 = sprintf('file_%i', counter);
fid2 = fopen(file2,'at');
fprintf(fid2, '%i\n', counter);
fclose(fid2)


% File for testing reduce
fprintf(fid, '%s, %i,%i, %f\n', SUID,i,j,r(j));
end
for j = 1:length(k)

counter = (i-1)*length(k) + j ;
% open a file for testing gather
file2 = sprintf('file_%i', counter);
fid2 = fopen(file2,'at');
fprintf(fid2, '%i\n', counter);
fclose(fid2)


% File for testing reduce
fprintf(fid, '%s, %i,%i, %f\n', SUID,i,j,r(j));
end
end

fclose(fid);
58 changes: 58 additions & 0 deletions example/MATLAB/r_superres1d.m
@@ -0,0 +1,58 @@

close all
clear all



pd.fc=10;
pd.SRFid=30; % must be even

pd.method='l2constr';
pd.oracle='nooracle';

pd.fid=pd.SRFid*pd.fc;
pd.fhi=pd.fc:pd.fc:pd.fid;
pd.SRF=pd.fhi/pd.fc;
pd.N=2*pd.fid; %must be even

pd.dpoints=2; % number of independent examples generated for each selection of parameters
pd.alpha=1; %spike dynamic range

pd.n=2*pd.fc+1; % ?? do I need n?

P=[25,50,75,100,25e5,50e5,75e5,100e5];
r=[1];
d=[1,2];

output.param=cell(length(P),length(r),length(d));
%output.result=cell(length(P),length(r),length(d));

%output=zeros(length(P),length(r),length(d))



for i = 1:length(P)
for j = 1:length(r)
for k = 1:length(d)

pd.P=P(i);
pd.r=r(j);
pd.d=d(k);


output.param{i,j,k} = pd;
% output.result{i,j,k}=run_superres_1d_fixedparam(pd);

filename='Results.mat';
savestr = sprintf('save ''%s'' output', filename);
eval(savestr);
fprintf('CREATED OUTPUT FILE %s EXPERIMENT COMPLETE\n',filename);


end
end
end




19 changes: 11 additions & 8 deletions example/MATLAB/simpleExample.m
@@ -3,13 +3,16 @@

file = 'results.txt';

for i = 1:3
for j = 1:5
% write to a text file for testing reduce
fid = fopen(file,'at');
fprintf(fid, '%i,%i,%i\n', i,j,i+j);
fclose(fid)
end

n_list = [2^5]

for i = 1:length(n_list)
for j = 1:4
% write to a text file for testing reduce
fid = fopen(file,'at');
fprintf(fid, '%i,%i,%i\n', i,j,i+j);
fclose(fid)
end
end



Empty file added example/Python/DEP/dep.py
Empty file.
14 changes: 14 additions & 0 deletions example/Python/dev-cj
@@ -0,0 +1,14 @@
#!/bin/bash
if [ "$1" == "run" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl run simpleExample.py sherlock -dep ./ -m "test of CJ run" -mem "2G"
elif [ "$1" == "rrun" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl rrun simpleExample.py sherlock -dep ./ -m "test of CJ simple rrun" -mem "2G"
elif [ "$1" == "parrun" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl parrun simpleExample.py sherlock -dep ./ -m "test of CJ simple parrun" -mem "2G"
elif [ "$1" == "parrun" ] && [ "$2" == "python" ];then
perl ../../src/CJ.pl parrun myMatlabJob.py sherlock -dep ./ -m "test of CJ parrun -- invoking Matlab for range" -mem "2G"
elif [ "$1" == "reduce" ] && [ "$2" == "default" ];then
perl ../../src/CJ.pl reduce Results.csv
else
perl ../../src/CJ.pl "$@"
fi
172 changes: 172 additions & 0 deletions example/Python/pytorch/mnist/mnist.py
@@ -0,0 +1,172 @@
# DCNN Training Example
# Data: MNIST
# Author: Hatef Monajemi (monajemi AT stanford DOT edu)
# Date: Aug 2017
# Stanford, CA

import numpy, os.path
#import matplotlib.pyplot as plt
import torch
import torchvision
import torchvision.transforms as transforms
from torch.autograd import Variable

use_gpu = torch.cuda.is_available()


# Set the seed for pytorch
seed = 1915;
numpy.random.seed(seed)
torch.manual_seed(seed)
if use_gpu:
    torch.cuda.manual_seed(seed)
    print('using GPU')
else:
    print('using CPUs only')


# load data using torchvision and do some transformations
batchSize=4;
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
])
training_data = torchvision.datasets.MNIST(root='./data', train=True , download=True, transform=transform);
test_data = torchvision.datasets.MNIST(root='./data', train=False, download=True, transform=transform);


# build a trainloader to sample data
trainloader = torch.utils.data.DataLoader(training_data , batch_size=batchSize, shuffle=True, num_workers=2)
testloader = torch.utils.data.DataLoader(test_data , batch_size=batchSize, shuffle=True, num_workers=2)

###############################################
## Experiments with images to get familiar with
## them
## functions to show image
#from torchvision.utils import make_grid;
#def imshow(img):
# img = img / 2 + 0.5 # unnormalize
# npimg = img.numpy()
# plt.imshow(numpy.transpose(npimg, (1, 2, 0)))
#
#
#
## get some random training images
#dataiter = iter(trainloader)
#images, labels = dataiter.next()
#
## show images
#imshow(make_grid(images))
## print labels
#print(' '.join('%5s' % labels[j] for j in range(4)))
###############################################


# Define a CNN
class CNN(torch.nn.Module):
    def __init__(self):
        super(CNN,self).__init__();
        self.conv1 = torch.nn.Conv2d(1,10,5) # 1 input Channel, 10 output Channel, 5x5 filter (28 -> 24)
        self.relu = torch.nn.ReLU();
        self.pool = torch.nn.MaxPool2d(2,stride=2); #(24 -> 12)
        self.fc1 = torch.nn.Linear(10*12*12, 120);
        self.fc2 = torch.nn.Linear(120,10);

    def forward(self,x):
        x = self.pool(self.relu(self.conv1(x)))
        x = x.view(-1,10*12*12); # reshape it to a row vector
        x = self.relu(self.fc1(x));
        x = self.fc2(x)
        return x;

model = CNN();

if use_gpu:
model = model.cuda()


# initiate model parameters with the ones we have, if any
if os.path.exists('model_params.pt'):
    model.load_state_dict(torch.load('model_params.pt'))





loss_fn = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9)



running_loss = 0.0;
for epoch in range(4):

    for i, data in enumerate(trainloader,0):
        # read inputs and labels

        inputs, labels = data;
        # wrap them in Variable
        if use_gpu:
            inputs = Variable(inputs.cuda())
            labels = Variable(labels.cuda())
        else:
            inputs, labels = Variable(inputs), Variable(labels)

        # generate prediction
        preds = model(inputs);
        # compute loss
        loss = loss_fn(preds,labels);

        # update the weights by backprop algo
        optimizer.zero_grad() # zero the gradients from previous calls
        loss.backward(); # compute gradient of loss w.r.t all parameters
        optimizer.step(); # This updates all the parameters of the model

        # print some statistics of loss
        running_loss += loss.data[0];
        if i % 2000 == 1999:
            print('loss[%-2i,%6i] -> %3.2f' % (epoch+1,i+1,running_loss))
            running_loss = 0.0;

print('Done training');


###############################################
## Predict for 4 images
#dataiter = iter(testloader)
#images, labels = dataiter.next()
#
## print images
#imshow(torchvision.utils.make_grid(images))
#print('GroundTruth: ', ' '.join('%5s' % labels[j] for j in range(4)))
#
#prediction = model(Variable(images));
#_, predicted = torch.max(prediction.data, 1)
#print('Predicted: ', ' '.join('%5s' % predicted[j][0] for j in range(4)))
###############################################



# Whole data set
correct = 0
total = 0
for data in testloader:
    inputs, labels = data
    # wrap them in Variable
    if use_gpu:
        inputs = Variable(inputs.cuda())
    else:
        inputs = Variable(inputs)
    prediction = model(inputs);
    _, predicted = torch.max(prediction.data, 1)
    total += labels.size(0)
    correct += (predicted.cpu() == labels).sum()

print('Accuracy of the network on %i test images of MNIST: %3.2f %%' % (total, 100 * correct / total))



# save the model params for future use:
torch.save(model.state_dict(), 'model_params.pt');
# To reload later
#model = CNN();
#model.load_state_dict(torch.load(PATH))
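Since .gitignore in this commit also excludes example/Python/pytorch/mnist/data, this script is evidently meant to be launched through CJ like the other examples. A hedged sketch of such a submission (the memory value and message below are made up; the flags mirror the dev-cj helpers):

cd example/Python/pytorch/mnist
perl ../../../../src/CJ.pl run mnist.py sherlock -m "MNIST CNN training" -mem "8G"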
19 changes: 19 additions & 0 deletions example/Python/simpleExample.py
@@ -0,0 +1,19 @@
# This is a test Python script for CJ
# Author: Hatef Monajemi June 11 2017
import numpy as np;
import csv;

SUID = 'monajemi'
file = SUID+'_results.csv';

Var0 = np.array([1,2,3]);
Var1 = np.array([1,2,3,4,10,5]);
with open('file.txt','w') as myfile:
    for i in range(len(Var0)):
        for j in range(len(Var1)): # This is a comment
            # write to a text file for testing reduce
            with open(file,'a') as csvfile:
                resultswriter = csv.writer(csvfile,delimiter=',');
                resultswriter.writerow([i,j,i+j]);
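The CSV appended here is what the reduce branch of the Python dev-cj helper would later merge across parallel jobs; note that the helper reduces Results.csv while this script names its output <SUID>_results.csv, so the filename in the sketch below is an assumption:

# Sketch: merge the per-job CSVs produced by a parrun of simpleExample.py
perl ../../src/CJ.pl reduce monajemi_results.csv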


File renamed without changes.
Empty file added example/R/touch
Empty file.
8 changes: 8 additions & 0 deletions src/.app_list
@@ -0,0 +1,8 @@
{
"anaconda":{"version":"Anaconda3-4.4.0-Linux-x86_64","space":"6.9G", "install_time":"10-20 min"},
"miniconda":{"version":"Miniconda3-latest-Linux-x86_64", "space":"392M", "install_time":" 1-5 min"},
"cvx":{"version":"cvx-rd", "space":"108M", "install_time":"20-60 sec"},
"matlab":{"version":"", "space":"", "install_time":""},
"composer":{"version":"latest", "space":"", "install_time":""}

}
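This hidden file appears to catalogue the applications CJ can set up on a cluster, together with their disk footprint and rough install time. A quick way to sanity-check it, assuming a local python is available (a sketch, not part of CJ itself):

# Validate and pretty-print the app list as JSON
python -m json.tool src/.app_list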