-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathoverturn_local.py
executable file
·93 lines (83 loc) · 3.4 KB
/
overturn_local.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
#!/usr/bin/env python
"""Execo engine to explore parameter space.
This script is meant to run directly on the cluster
to avoid spawning many ssh processes at each stage.
Here, the effect of Ra, Rcmb (and Pphase) on the
overturn following the crystallization of the magma ocean.
Jobs sent to Rangiroa
"""
import os
import f90nml
from threading import Thread
from execo import sleep
from execo_engine import Engine, ParamSweeper, sweep, logger, slugify
import math
import subprocess as sp
#Define general parameters
# server on which jobs are run. Needs ssh access.
jobserver = 'rangiroa'
# Remote directory in which all jobs are sent
parent_dir = '/home/stephane/Overturn/Ched2/'
class overturn(Engine):
    """Execo engine sweeping (RA, RCMB, KFe) parameter combinations.

    For each combination a par file is generated from ``template.nml``,
    installed in a dedicated run directory under ``parent_dir``, and a
    PBS job is submitted there via qsub.
    """

    def create_sweeper(self):
        """Define the parameter space and attach a persistent sweeper."""
        parameters = {
            'RA': ['1.e5', '1.e6'],
            'RCMB': [1.19, 3.29],
            'KFe': [0.85, 0.9]
        }
        sweeps = sweep(parameters)
        # The sweeper persists its state under result_dir, so an
        # interrupted campaign resumes without redoing finished combs.
        self.sweeper = ParamSweeper(os.path.join(self.result_dir, "sweeps"),
                                    sweeps)

    def create_par_file(self, comb):
        """Create the run directory for *comb* and install its par file.

        Parameters
        ----------
        comb : dict
            One combination with keys 'RA', 'RCMB' and 'KFe'.
        """
        logger.info('Creating par file')
        comb_dir = parent_dir + slugify(comb) + '/'
        logger.info('comb_dir = ' + comb_dir)
        # One mkdir -p creates the run dir and both subdirectories.
        sp.call('mkdir -p ' + comb_dir + 'Img ' + comb_dir + 'Op', shell=True)
        # Generate the par file from the namelist template.
        par_file = 'par_' + slugify(comb)
        nml = f90nml.read('template.nml')
        nml['refstate']['ra0'] = float(comb['RA'])
        nml['tracersin']['K_Fe'] = comb['KFe']
        nml['geometry']['r_cmb'] = comb['RCMB']
        # Vertical resolution grows with log10(Ra), capped at 128 points.
        nztot = min(int(2**(math.log10(float(comb['RA'])) + 1)), 128)
        nml['geometry']['nztot'] = nztot
        # Horizontal point count scales with the mid-shell circumference.
        nml['geometry']['nytot'] = int(math.pi * (comb['RCMB'] + 0.5) * nztot)
        nml.write(par_file, force=True)
        logger.info('Created par file ' + par_file)
        # Install the par file in the run directory under the name 'par'.
        sp.call('cp ' + par_file + ' ' + comb_dir, shell=True)
        sp.call('cd ' + comb_dir + ' ; mv ' + par_file + ' par', shell=True)
        logger.info('Done')

    def submit_job(self, comb):
        """Submit the PBS batch job for *comb*.

        Returns
        -------
        str or None
            The numeric part of the qsub job id (qsub prints e.g.
            '12345.server'), or None when qsub produced no output.
        """
        logger.info('Submitting job on ' + jobserver)
        comb_dir = parent_dir + slugify(comb) + '/'
        job_sub = sp.Popen('cd ' + comb_dir +
                           ' ; /usr/local/bin/qsub /home/stephane/ExamplePBS/batch_single',
                           shell=True,
                           stdout=sp.PIPE, stderr=sp.STDOUT)
        # communicate() waits for qsub (no zombie) and drains stdout.
        out = job_sub.communicate()[0]
        if not isinstance(out, str):
            # Python 3: pipe output is bytes; decode before parsing.
            out = out.decode('utf-8', errors='replace')
        lines = out.splitlines()
        if not lines:
            logger.error('qsub produced no output for ' + slugify(comb))
            return None
        return lines[-1].split('.')[0]

    def workflow(self, comb):
        """Process one combination: par file, submission, bookkeeping."""
        self.create_par_file(comb)
        job_id = self.submit_job(comb)
        logger.info('Combination %s will be treated by job %s',
                    slugify(comb), str(job_id))
        self.sweeper.done(comb)

    def run(self):
        """Entry point: launch one worker thread per combination."""
        self.create_sweeper()
        logger.info('%s parameters combinations to be treated',
                    len(self.sweeper.get_sweeps()))
        threads = []
        while len(self.sweeper.get_remaining()) > 0:
            comb = self.sweeper.get_next()
            if comb is None:
                # Remaining combinations are all in progress elsewhere;
                # without this guard workflow(None) would crash slugify.
                break
            t = Thread(target=self.workflow, args=(comb,))
            t.daemon = True
            threads.append(t)
            t.start()
        for t in threads:
            t.join()
if __name__ == "__main__":
    # Engine.start() handles option parsing before dispatching to run().
    overturn().start()