diff --git a/bin/pygrb/pycbc_make_offline_grb_workflow b/bin/pygrb/pycbc_make_offline_grb_workflow
index f704c55b570..dde9e49cd15 100644
--- a/bin/pygrb/pycbc_make_offline_grb_workflow
+++ b/bin/pygrb/pycbc_make_offline_grb_workflow
@@ -248,6 +248,15 @@ if wflow.cp.has_option("workflow-segments", "segments-vetoes"):
                                          tags=['veto'])
     datafind_veto_files.append(veto_file)
 
+# Generate sky grid if needed
+skygrid_file = None
+if wflow.cp.has_option("workflow", "sky-error"):
+    if float(wflow.cp.get("workflow", "sky-error")):
+        logging.info("Generating sky-grid file.")
+        skygrid_file = _workflow.make_skygrid_node(wflow, df_dir,
+                                                   tags=['SEARCH'])
+        datafind_veto_files.extend(skygrid_file)
+
 # Config file consistency check for IPN GRBs
 if wflow.cp.has_option("workflow-inspiral", "ipn-search-points") \
         and wflow.cp.has_option("workflow-injections", "ipn-sim-points"):
diff --git a/pycbc/workflow/grb_utils.py b/pycbc/workflow/grb_utils.py
index fc8b32b5b30..e70f5e0d7e0 100644
--- a/pycbc/workflow/grb_utils.py
+++ b/pycbc/workflow/grb_utils.py
@@ -238,6 +238,28 @@ def get_sky_grid_scale(
     return out
 
 
+def make_skygrid_node(workflow, out_dir, tags=None):
+    """
+    Add a job to the workflow to produce the PyGRB search sky grid.
+    """
+    tags = [] if tags is None else tags
+
+    # Initialize job node
+    grb_name = workflow.cp.get('workflow', 'trigger-name')
+    extra_tags = ['GRB'+grb_name]
+    node = Executable(workflow.cp, 'make_sky_grid',
+                      ifos=workflow.ifos, out_dir=out_dir,
+                      tags=tags+extra_tags).create_node()
+    node.add_opt('--instruments', ' '.join(workflow.ifos))
+    node.new_output_file_opt(workflow.analysis_time, '.h5', '--output',
+                             tags=extra_tags, store_file=True)
+
+    # Add job node to the workflow
+    workflow += node
+
+    return node.output_files
+
+
 def generate_tc_prior(wflow, tc_path, buffer_seg):
     """
     Generate the configuration file for the prior on the coalescence
diff --git a/pycbc/workflow/jobsetup.py b/pycbc/workflow/jobsetup.py
index 2c90ec19138..db6a349efe4 100644
--- a/pycbc/workflow/jobsetup.py
+++ b/pycbc/workflow/jobsetup.py
@@ -277,6 +277,7 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
         curr_out_files = FileList([])
     ipn_sky_points = None
     bank_veto = None
+    skygrid_file = None
     input_files = FileList(datafind_outs)
     for f in datafind_outs:
         if 'IPN_SKY_POINTS' in f.description:
@@ -287,6 +288,9 @@ def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job,
         elif 'INPUT_BANK_VETO_BANK' in f.description:
             bank_veto = f
             input_files.remove(f)
+        elif 'make_sky_grid' in f.description:
+            skygrid_file = f
+            input_files.remove(f)
 
     split_bank_counter = 0
 
@@ -296,7 +300,8 @@
         tag.append(split_bank.tag_str)
         node = curr_exe_job.create_node(data_seg, job_valid_seg,
                 parent=split_bank, dfParents=input_files,
-                bankVetoBank=bank_veto, ipn_file=ipn_sky_points,
+                bankVetoBank=bank_veto,
+                skygrid_file=skygrid_file, ipn_file=ipn_sky_points,
                 slide=slide_dict, tags=tag)
         workflow.add_node(node)
         split_bank_counter += 1
@@ -310,7 +315,7 @@
             node = curr_exe_job.create_node(data_seg, job_valid_seg,
                     parent=split_bank, inj_file=inj_file, tags=tag,
                     dfParents=input_files, bankVetoBank=bank_veto,
-                    ipn_file=ipn_sky_points)
+                    skygrid_file=skygrid_file, ipn_file=ipn_sky_points)
             workflow.add_node(node)
             split_bank_counter += 1
             curr_out_files.extend(node.output_files)
@@ -670,7 +675,8 @@ def __init__(self, cp, name, ifo=None, injection_file=None,
         self.num_threads = 1
 
     def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
-                    dfParents=None, bankVetoBank=None, ipn_file=None,
+                    dfParents=None, bankVetoBank=None,
+                    skygrid_file=None, ipn_file=None,
                     slide=None, tags=None):
         if tags is None:
             tags = []
@@ -718,6 +724,9 @@ def create_node(self, data_seg, valid_seg, parent=None, inj_file=None,
                 node.add_input(frame_file)
                 node.add_arg(frame_arg)
 
+        if skygrid_file is not None:
+            node.add_input_opt('--sky-grid', skygrid_file)
+
         if ipn_file is not None:
             node.add_input_opt('--sky-positions-file', ipn_file)
 
diff --git a/pycbc/workflow/matched_filter.py b/pycbc/workflow/matched_filter.py
index 877b4113d7e..761d417ecd6 100644
--- a/pycbc/workflow/matched_filter.py
+++ b/pycbc/workflow/matched_filter.py
@@ -202,8 +202,9 @@ def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
     science_segs : ifo-keyed dictionary of ligo.segments.segmentlist instances
         The list of times that are being analysed in this workflow.
     datafind_outs : pycbc.workflow.core.FileList
-        An FileList of the datafind files that are needed to obtain the
-        data used in the analysis.
+        A FileList of the datafind files that are needed to obtain the
+        data used in the analysis and, if requested by the user, the
+        vetoes File and the search sky-grid File.
     tmplt_banks : pycbc.workflow.core.FileList
         An FileList of the template bank files that will serve as input
         in this stage.
@@ -242,11 +243,18 @@ def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs,
 
     if match_fltr_exe == 'pycbc_multi_inspiral':
         exe_class = select_matchedfilter_class(match_fltr_exe)
-        # Right ascension + declination must be provided in radians
-        cp.set('inspiral', 'ra',
-               cp.get('workflow', 'ra'))
-        cp.set('inspiral', 'dec',
-               cp.get('workflow', 'dec'))
+        bool_sg = ['make_sky_grid' in f.description for f in datafind_outs]
+        n_sg = sum(bool_sg)
+        if n_sg == 0:
+            # Right ascension + declination must be provided in radians
+            cp.set('inspiral', 'ra',
+                   cp.get('workflow', 'ra'))
+            cp.set('inspiral', 'dec',
+                   cp.get('workflow', 'dec'))
+        elif n_sg > 1:
+            msg = f'{datafind_outs} contains {n_sg} sky-grid files, '
+            msg += 'but at most one is expected.'
+            raise ValueError(msg)
     # At the moment we aren't using sky grids, but when we do this code
     # might be used then.
     # from pycbc.workflow.grb_utils import get_sky_grid_scale
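Reviewer note: the new plumbing is driven entirely by the workflow configuration
(a non-zero sky-error option, together with the existing trigger-name, ra and
dec options, all read from the [workflow] section), and the sky-grid File is
routed to the inspiral jobs as --sky-grid by string-matching the executable
name 'make_sky_grid' against File.description. The sketch below illustrates
that routing predicate and its at-most-one guard in isolation; it is not PyCBC
code, and FakeFile is a hypothetical stand-in for pycbc.workflow.core.File.

    # Minimal sketch of the description-based routing used in this patch.
    # FakeFile is a hypothetical stand-in for pycbc.workflow.core.File.
    class FakeFile:
        def __init__(self, description):
            self.description = description

    def route_sky_grid(datafind_outs):
        """Pull the (at most one) sky-grid file out of the datafind outputs,
        mirroring the new branches in jobsetup.py and matched_filter.py."""
        matches = [f for f in datafind_outs if 'make_sky_grid' in f.description]
        if len(matches) > 1:
            # Same guard as the n_sg > 1 check in matched_filter.py
            raise ValueError(f'Found {len(matches)} sky-grid files, '
                             'but at most one is expected.')
        skygrid_file = matches[0] if matches else None
        input_files = [f for f in datafind_outs if f is not skygrid_file]
        return skygrid_file, input_files

    # Example: one frame file plus one sky-grid file (descriptions are made up)
    outs = [FakeFile('DATAFIND'), FakeFile('make_sky_grid_GRB170817A')]
    sky, rest = route_sky_grid(outs)
    assert sky is outs[1] and rest == [outs[0]]

One consequence of keying on the executable name is that renaming the
make_sky_grid executable entry in the configuration would silently break the
routing; matching on a dedicated description tag instead would decouple the
two.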