forked from gwastro/pycbc-config
-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathanalysis.ini
283 lines (224 loc) · 8.82 KB
/
analysis.ini
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
; PyCBC configuration for BNS-NSBH-BBH search on O1-ER9-ER10-02 data
;
; Documentation for running the workflow generator is here:
;
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/pycbc_make_coinc_search_workflow.html
[workflow]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/initialization.html
; file retention level can take 4 possible values
; "all_files" for debugging the pipeline
; "all_triggers" recommended for normal running
; "merged_triggers" used to rerun with file reuse for changes that do not affect
; single-detector trigger sets but may affect coinc results
; "results" used to rerun with file reuse to rerun with changes to plots
file-retention-level = all_triggers
[workflow-datafind]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/datafind.html
datafind-method = AT_RUNTIME_SINGLE_FRAMES
; Look for times when the segment database says that data is analyzable, but
; no frame data exists on disk. If any frame data is missing, raise an error
datafind-check-segment-gaps = raise_error
; Stat each frame that datafind returns and fail if any frames are missing
datafind-check-frames-exist = raise_error
; Check to see if there are any frames on disk that are not covered in the
; segment_summary table for science segments. This must be "warn" when using
; C00 data, due to known discrepancies between the database segments and GDS
; frames, but should be set to "raise_error" for the final calibration.
datafind-check-segment-summary = warn
[workflow-segments]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/segments.html
segments-method = ALL_SINGLE_IFO_TIME
[workflow-tmpltbank]
; See http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/template_bank.html
tmpltbank-method = PREGENERATED_BANK
; NOTE - This bank file may or may not be effectual depending on the data and its PSD!
tmpltbank-pregenerated-bank = https://raw.github.com/ligo-cbc/pycbc-config/7a437f481796ffa57a0cf73caa53a1ee641895ab/O2/bank/H1L1-HYPERBANK_SEOBNRv4v2_VARFLOW_THORNE-1163174417-604800.xml.gz
[workflow-splittable]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/splittable.html
splittable-method = IN_WORKFLOW
splittable-exe-tag = splitbank
[workflow-splittable-full_data]
; empirically tuned on O2 OSG runtime data to give runtimes between 15 mins and 10 hours, mostly ~6 hours
splittable-num-banks = 32
[workflow-splittable-injections]
; empirically tuned on O2 OSG runtime data to give runtimes between a few mins and ~10 hours, mostly >30 minutes
splittable-num-banks = 8
[workflow-matchedfilter]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/matched_filter.html
matchedfilter-method = WORKFLOW_INDEPENDENT_IFOS
min-analysis-segments = 1
min-analysis-length = 528
max-analysis-segments = 5
output-type = hdf
plot-throughput =
[workflow-coincidence]
; http://ligo-cbc.github.io/pycbc/releases/v1.2.0/html/workflow/hdf_coincidence.html
do-trigger-fitting =
; any cuts to remove super-glitchy templates are assumed to have been made in the bank
;background-bins = all:total:lt600
[workflow-coincidence-full]
parallelization-factor = 20
[workflow-coincidence-inj]
parallelization-factor = 5
[workflow-psd]
parallelization-factor = 10
[workflow-gating]
; not clear if this has any meaning now - FIXME
gating-method = PREGENERATED_FILE
[workflow-results]
; number of levels of event hierarchical removal to perform
max-hierarchical-removal = 5
[llwadd]
[segments_from_cats]
[ligolw_combine_segments]
[splitbank]
; split the template bank up randomly between jobs to ensure equal run time
random-sort =
; set a seed so that the random split is deterministic
random-seed = 666
[inspiral]
; parameters for matched filtering
; sine-Gaussian chisq
sgchisq-snr-threshold = 6.0
sgchisq-locations = "mtotal>40:20-30,20-45,20-60,20-75,20-90,20-105,20-120"
; amount of buffer data for letting filters settle
pad-data = 8
; conditioning high-pass filter
strain-high-pass = 15
; working sample rate for matched filtering
sample-rate = 2048
; segmentation of the data
; start-pad must be long enough to contain a full BNS signal
segment-length = 512
segment-start-pad = 144
segment-end-pad = 16
; turn on zero-padding
allow-zero-padding =
; Taper the first and last second of data read in for zero padding
taper-data = 1
; estimation of the noise PSD and construction of the whitening filter
psd-estimation = median
psd-segment-length = 16
psd-segment-stride = 8
psd-inverse-length = 16
; 512s PSD length given by:
; psd-segment-length + (psd-num-segments - 1) * psd-segment-stride
psd-num-segments = 63
; Autogating options
autogating-threshold = 100
autogating-cluster = 0.5
autogating-width = 0.25
autogating-taper = 0.25
autogating-pad = 16
; starting frequency of matched filter integration
; low-frequency-cutoff is set to the minimum variable frequency in the bank,
; rounded down
enable-bank-start-frequency =
low-frequency-cutoff = 20
; template approximant
; switch to SEOBNRv4 templates as soon as we can (M >= 4)
approximant = 'SPAtmplt:mtotal<4' 'SEOBNRv4_ROM:else'
order = -1
; threshold for generating triggers
snr-threshold = 5.5
; method for clustering triggers over time
cluster-method = window
cluster-window = 1
cluster-function = symmetric
; signal-based vetoes
chisq-bins = "0.72*get_freq('fSEOBNRv4Peak',params.mass1,params.mass2,params.spin1z,params.spin2z)**0.7"
newsnr-threshold = 5
; options for reducing the computational cost and storage
filter-inj-only =
injection-window = 4.5
processing-scheme = mkl
[inspiral-h1]
; Hanford specific matched-filter parameters
channel-name = ${workflow|h1-channel-name}
[inspiral-l1]
; Livingston specific matched-filter parameters
channel-name = ${workflow|l1-channel-name}
[inspiral-v1]
; Virgo specific matched-filter parameters
channel-name = ${workflow|v1-channel-name}
[calculate_psd]
cores = 4
low-frequency-cutoff = ${inspiral|low-frequency-cutoff}
pad-data = ${inspiral|pad-data}
strain-high-pass = ${inspiral|strain-high-pass}
sample-rate = ${inspiral|sample-rate}
segment-length = ${inspiral|segment-length}
segment-start-pad = ${inspiral|segment-start-pad}
segment-end-pad = ${inspiral|segment-end-pad}
psd-estimation = ${inspiral|psd-estimation}
psd-segment-length = ${inspiral|psd-segment-length}
psd-segment-stride = ${inspiral|psd-segment-stride}
psd-num-segments = ${inspiral|psd-num-segments}
taper-data = ${inspiral|taper-data}
autogating-threshold = ${inspiral|autogating-threshold}
autogating-cluster = ${inspiral|autogating-cluster}
autogating-width = ${inspiral|autogating-width}
autogating-taper = ${inspiral|autogating-taper}
autogating-pad = ${inspiral|autogating-pad}
[merge_psds]
[calculate_psd-h1]
channel-name = ${workflow|h1-channel-name}
[calculate_psd-l1]
channel-name = ${workflow|l1-channel-name}
[calculate_psd-v1]
channel-name = ${workflow|v1-channel-name}
[hdf_trigger_merge]
[bank2hdf]
[fit_by_template]
fit-function = exponential
sngl-stat = newsnr_sgveto
stat-threshold = 6.
prune-param = mtotal
log-prune-param =
prune-bins = 2
prune-number = 2
[fit_over_param]
fit-param = template_duration
; f_low required to calculate template duration to smooth the fit over
f-lower = ${inspiral|low-frequency-cutoff}
log-param =
regression-method = tricube
smoothing-width = 0.2
[distribute_background_bins]
[coinc]
; additional time (in seconds) to add to light-travel time to construct time coincidence window
coinc-threshold = 0.005
strict-coinc-time =
ranking-statistic = phasetd_exp_fit_stat_sgveto
; Parameters to reproduce this file available at
; https://git.ligo.org/ligo-cbc/pycbc-software/blob/c2fc41b3c32dbaa2fe859b0d5929b050c01c2b91/statistic-files/v1/make_h1l1_phase_time_amp_v1.sh
statistic-files = file://localhost/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/statistic-files/v1/H1L1-PHASE_TIME_AMP_v1.hdf
[coinc-full]
; time-slide interval in seconds
timeslide-interval = 0.1
; reduction factor to make lightning bolts for IFAR plots
decimation-factor = 5000
; always store time slide coincs with a stat above specified value
loudest-keep-value = 8.5
[coinc-fullinj&coinc-injfull]
; perform little-dog analysis for software injections
timeslide-interval = ${coinc-full|timeslide-interval}
cluster-window = ${statmap|cluster-window}
; keep only coincs with a stat above specified value in injection little-dog analysis
loudest-keep-value = 8.5
[statmap]
max-hierarchical-removal = ${workflow-results|max-hierarchical-removal}
hierarchical-removal-against = exclusive
[statmap&statmap_inj]
; time window (in seconds) used to remove triggers around zero-lag coincidences
veto-window = 0.100
; cluster each slide separately over a 10 second window
cluster-window = 10.0
[combine_statmap]
cluster-window = ${statmap|cluster-window}
[foreground_censor]
strict-coinc-time =
[hdfinjfind]
; time in seconds within which a trigger must fall to be associated with an injection
injection-window = 2.0
optimal-snr-column = H1:alpha1 L1:alpha2