-
Notifications
You must be signed in to change notification settings - Fork 1
/
crab_data_cfg.py
70 lines (55 loc) · 2.53 KB
/
crab_data_cfg.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
from collections import OrderedDict
from zero_bias_datasets import zero_bias_datasets
from CRABClient.UserUtilities import config
# Build the CRAB configuration object (the imported `config` factory is
# replaced by the instance it returns — standard CRAB idiom).
config = config()

# --- General: keep both the job outputs and the logs on the storage site.
config.General.transferOutputs = True
config.General.transferLogs = True

# --- JobType: cmsRun analysis job driven by the customised data config.
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'customise_data_cfg.py'
config.JobType.outputFiles = ['outputFULL.root']
config.JobType.maxMemoryMB = 4000   # per-job memory budget (MB)
config.JobType.priority = 99999     # push these tasks ahead in the queue

# --- Data: split jobs by luminosity sections and publish the result.
config.Data.splitting = 'LumiBased'
config.Data.unitsPerJob = 20        # lumi sections per job
config.Data.publication = True
config.Data.outputDatasetTag = 'doubleTauTriggerOpenPt'

# --- Site: stage the output out to the CERN Tier-2.
config.Site.storageSite = 'T2_CH_CERN'
# config.Site.blacklist = ['T1_US_FNAL']
# config.Site.whitelist = ['T2_CH_CERN']
if __name__ == '__main__':
    from CRABAPI.RawCommand import crabCommand
    from CRABClient.ClientExceptions import ClientException
    try:
        from httplib import HTTPException       # Python 2
    except ImportError:
        from http.client import HTTPException   # Python 3

    # Tag shared by the CRAB work area and the EOS output directory.
    # NOTE(review): this differs from Data.outputDatasetTag set above
    # ('doubleTauTriggerOpenPt') — confirm that is intentional.
    tag = 'doubleTauTriggerV3'

    def submit(config):
        # Submit a single CRAB task. Known submission failures are
        # reported and swallowed so the remaining tasks still go out.
        try:
            crabCommand('submit', config=config)
        except HTTPException as hte:
            # Server-side rejection: the response headers carry the reason.
            print("Failed submitting task: %s" % (hte.headers))
        except ClientException as cle:
            print("Failed submitting task: %s" % (cle))

    datasets = zero_bias_datasets

    # One JSON lumi mask per instantaneous-luminosity bin; an OrderedDict
    # keeps the submission order deterministic.
    jsons = OrderedDict()
    jsons['lumi_1p05e34'] = 'jsons/ntuple_1p05e34.json'
    jsons['lumi_1p15e34'] = 'jsons/ntuple_1p15e34.json'
    jsons['lumi_1p25e34'] = 'jsons/ntuple_1p25e34.json'
    jsons['lumi_1p35e34'] = 'jsons/ntuple_1p35e34.json'
    jsons['lumi_1p45e34'] = 'jsons/ntuple_1p45e34.json'
    jsons['lumi_8p5e33' ] = 'jsons/ntuple_8p5e33.json'

    # .items() (not the Py2-only .iteritems()) works on both Python 2 and 3.
    for kj, vj in jsons.items():
        for k, v in datasets.items():
            # The JSON files are for run period H only.
            if k != 'ZeroBias_2016H':
                continue
            # We want to put all the CRAB project directories from the tasks
            # we submit here into one common directory. That's why we set this
            # parameter (here or above in the configuration file, it does not
            # matter, we will not overwrite it).
            config.General.workArea = 'crab_data_' + tag + '_' + kj
            config.Data.outLFNDirBase = '/store/group/phys_tau/' + tag + '/' + kj
            config.Data.lumiMask = vj
            config.General.requestName = k
            # Each dataset entry is assumed to be a (primary, secondary)
            # input-dataset pair — verify against zero_bias_datasets.
            config.Data.inputDataset = v[0]
            config.Data.secondaryInputDataset = v[1]
            print('submitting config:')
            print(config)
            submit(config)
            # import pdb ; pdb.set_trace()