From 62e19cd4c5cb27de401171d457d4fe9ac19e753e Mon Sep 17 00:00:00 2001 From: Alexander Harvey Nitz Date: Wed, 29 Jun 2016 14:06:36 +0200 Subject: [PATCH 1/4] remove unused timeslides.py --- pycbc/workflow/timeslides.py | 109 ----------------------------------- 1 file changed, 109 deletions(-) delete mode 100644 pycbc/workflow/timeslides.py diff --git a/pycbc/workflow/timeslides.py b/pycbc/workflow/timeslides.py deleted file mode 100644 index 37a75c8b472..00000000000 --- a/pycbc/workflow/timeslides.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (C) 2013 Ian Harry -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - -# -# ============================================================================= -# -# Preamble -# -# ============================================================================= -# - -""" -This module is responsible for setting up the time slide files for -workflows. 
For details about this module and its capabilities see here: -https://ldas-jobs.ligo.caltech.edu/~cbc/docs/pycbc/NOTYETCREATED.html -""" - -import logging -import urllib -from pycbc.workflow.core import File, FileList, make_analysis_dir -from pycbc.workflow.jobsetup import select_generic_executable - -def setup_timeslides_workflow(workflow, output_dir=None, tags=[], - timeSlideSectionName='ligolw_tisi'): - ''' - Setup generation of time_slide input files in the workflow. - Currently used - only with ligolw_tisi to generate files containing the list of slides to be - performed in each time slide job. - - Parameters - ----------- - workflow : pycbc.workflow.core.Workflow - The Workflow instance that the coincidence jobs will be added to. - output_dir : path - The directory in which output files will be stored. - tags : list of strings (optional, default = []) - A list of the tagging strings that will be used for all jobs created - by this call to the workflow. This will be used in output names. - timeSlideSectionName : string (optional, default='injections') - The string that corresponds to the option describing the exe location - in the [executables] section of the .ini file and that corresponds to - the section (and sub-sections) giving the options that will be given to - the code at run time. - Returns - -------- - timeSlideOuts : pycbc.workflow.core.FileList - The list of time slide files created by this call. 
- ''' - logging.info("Entering time slides setup module.") - make_analysis_dir(output_dir) - # Get ifo list and full analysis segment for output file naming - ifoList = workflow.ifos - ifo_string = workflow.ifo_string - fullSegment = workflow.analysis_time - - # Identify which time-slides to do by presence of sub-sections in the - # configuration file - all_sec = workflow.cp.sections() - timeSlideSections = [sec for sec in all_sec if sec.startswith('tisi-')] - timeSlideTags = [(sec.split('-')[-1]).upper() for sec in timeSlideSections] - - timeSlideOuts = FileList([]) - - # FIXME: Add ability to specify different exes - - # Make the timeSlideFiles - for timeSlideTag in timeSlideTags: - currTags = tags + [timeSlideTag] - - timeSlideMethod = workflow.cp.get_opt_tags("workflow-timeslides", - "timeslides-method", currTags) - - if timeSlideMethod in ["IN_WORKFLOW", "AT_RUNTIME"]: - timeSlideExeTag = workflow.cp.get_opt_tags("workflow-timeslides", - "timeslides-exe", currTags) - timeSlideExe = select_generic_executable(workflow, timeSlideExeTag) - timeSlideJob = timeSlideExe(workflow.cp, timeSlideExeTag, ifos=ifo_string, - tags=currTags, out_dir=output_dir) - timeSlideNode = timeSlideJob.create_node(fullSegment) - if timeSlideMethod == "AT_RUNTIME": - workflow.execute_node(timeSlideNode) - else: - workflow.add_node(timeSlideNode) - tisiOutFile = timeSlideNode.output_files[0] - elif timeSlideMethod == "PREGENERATED": - timeSlideFilePath = workflow.cp.get_opt_tags("workflow-timeslides", - "timeslides-pregenerated-file", currTags) - file_url = urlparse.urljoin('file:', urllib.pathname2url(\ - timeSlideFilePath)) - tisiOutFile = File(ifoString, 'PREGEN_TIMESLIDES', - fullSegment, file_url, tags=currTags) - - timeSlideOuts.append(tisiOutFile) - - return timeSlideOuts From f01f6c494bdc6192bb6059744ca27a31a0ae2759 Mon Sep 17 00:00:00 2001 From: Alexander Harvey Nitz Date: Wed, 29 Jun 2016 14:42:08 +0200 Subject: [PATCH 2/4] increase strictness --- .landscape.yml | 3 ++- 1 file
changed, 2 insertions(+), 1 deletion(-) diff --git a/.landscape.yml b/.landscape.yml index 6e35051abb3..c3f191b69ce 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -1,8 +1,9 @@ doc-warnings: true test-warnings: no -strictness: medium +strictness: veryhigh autodetect: yes ignore-patterns: + - */__init__.py - examples/* - test/* - docs/* From e48940e8a40a9682d3af7ba7df8332bd5b672856 Mon Sep 17 00:00:00 2001 From: Alexander Harvey Nitz Date: Wed, 29 Jun 2016 14:57:29 +0200 Subject: [PATCH 3/4] test --- .landscape.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.landscape.yml b/.landscape.yml index c3f191b69ce..1e13d4db2b6 100644 --- a/.landscape.yml +++ b/.landscape.yml @@ -1,6 +1,6 @@ doc-warnings: true test-warnings: no -strictness: veryhigh +strictness: high autodetect: yes ignore-patterns: - */__init__.py From dc0a6dfd39cb4424c90d4d2425cdec1f1d07f45b Mon Sep 17 00:00:00 2001 From: Cody Date: Thu, 7 Jul 2016 09:24:33 +0000 Subject: [PATCH 4/4] Avoid mutable default arguments --- pycbc/fft/fft_callback.py | 4 +- pycbc/io/record.py | 4 +- pycbc/results/metadata.py | 4 +- pycbc/tmpltbank/bank_output_utils.py | 8 +- pycbc/workflow/coincidence.py | 48 ++++++--- pycbc/workflow/configuration.py | 6 +- pycbc/workflow/core.py | 12 ++- pycbc/workflow/gatefiles.py | 8 +- pycbc/workflow/grb_utils.py | 4 +- pycbc/workflow/injection.py | 4 +- pycbc/workflow/jobsetup.py | 132 ++++++++++++++++++------ pycbc/workflow/legacy_ihope.py | 70 ++++++++++--- pycbc/workflow/matched_filter.py | 12 ++- pycbc/workflow/plotting.py | 44 ++++++-- pycbc/workflow/postprocessing_cohptf.py | 12 ++- pycbc/workflow/psdfiles.py | 8 +- pycbc/workflow/segment.py | 4 +- pycbc/workflow/splittable.py | 4 +- pycbc/workflow/tmpltbank.py | 16 ++- test/fft_base.py | 8 +- tools/static/runtime-scipy.py | 8 +- 21 files changed, 317 insertions(+), 103 deletions(-) diff --git a/pycbc/fft/fft_callback.py b/pycbc/fft/fft_callback.py index 843adda5726..807defd1473 100644 --- 
a/pycbc/fft/fft_callback.py +++ b/pycbc/fft/fft_callback.py @@ -199,9 +199,11 @@ def compile(source, name): fhash.write(str(hash(source))) return lib_file -def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=[]): +def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=None): """ Get the IFFT execute and plan functions """ + if parameters is None: + parameters = [] source = fftsrc.render(input_callback=callback, output_callback=out_callback, parameters=parameters) path = compile(source, name) lib = ctypes.cdll.LoadLibrary(path) diff --git a/pycbc/io/record.py b/pycbc/io/record.py index 90130522f61..f16d9aeeebc 100644 --- a/pycbc/io/record.py +++ b/pycbc/io/record.py @@ -1146,12 +1146,14 @@ def default_fields(cls, include_virtual=True, **kwargs): return dict(cls._staticfields.items() + add_fields.items()) - def __new__(cls, shape, name=None, additional_fields=None, field_kwargs={}, + def __new__(cls, shape, name=None, additional_fields=None, field_kwargs=None, **kwargs): """The ``additional_fields`` should be specified in the same way as ``dtype`` is normally given to FieldArray. The ``field_kwargs`` are passed to the class's default_fields method as keyword arguments. """ + if field_kwargs is None: + field_kwargs = {} if 'names' in kwargs and 'dtype' in kwargs: raise ValueError("Please provide names or dtype, not both") default_fields = cls.default_fields(include_virtual=False, diff --git a/pycbc/results/metadata.py b/pycbc/results/metadata.py index dc58b7adeb0..ea34a1833dc 100644 --- a/pycbc/results/metadata.py +++ b/pycbc/results/metadata.py @@ -94,7 +94,7 @@ def load_png_metadata(filename): '.html':load_html_metadata, } -def save_fig_with_metadata(fig, filename, fig_kwds={}, **kwds): +def save_fig_with_metadata(fig, filename, fig_kwds=None, **kwds): """ Save plot to file with metadata included. Kewords translate to metadata that is stored directly in the plot file. Limited format types available. 
@@ -105,6 +105,8 @@ def save_fig_with_metadata(fig, filename, fig_kwds={}, **kwds): filename: str Name of file to store the plot. """ + if fig_kwds is None: + fig_kwds = {} try: extension = os.path.splitext(filename)[1] kwds['version'] = pycbc.version.git_verbose_msg diff --git a/pycbc/tmpltbank/bank_output_utils.py b/pycbc/tmpltbank/bank_output_utils.py index 3c856aa127a..c72853ba922 100644 --- a/pycbc/tmpltbank/bank_output_utils.py +++ b/pycbc/tmpltbank/bank_output_utils.py @@ -43,7 +43,7 @@ def return_empty_sngl(): return sngl def return_search_summary(start_time=0, end_time=0, nevents=0, - ifos=[], **kwargs): + ifos=None, **kwargs): """ Function to create a SearchSummary object where all columns are populated but all are set to values that test False (ie. strings to '', floats/ints @@ -59,6 +59,8 @@ def return_search_summary(start_time=0, end_time=0, nevents=0, lsctables.SeachSummary The "empty" SearchSummary object. """ + if ifos is None: + ifos = [] # create an empty search summary search_summary = lsctables.SearchSummary() @@ -283,7 +285,7 @@ def calculate_ethinca_metric_comps(metricParams, ethincaParams, mass1, mass2, return fMax_theor, gammaVals def output_sngl_inspiral_table(outputFile, tempBank, metricParams, - ethincaParams, programName="", optDict = {}, + ethincaParams, programName="", optDict = None, outdoc=None, **kwargs): """ Function that converts the information produced by the various pyCBC bank @@ -317,6 +319,8 @@ def output_sngl_inspiral_table(outputFile, tempBank, metricParams, All other key word arguments will be passed directly to ligolw_process.register_to_xmldoc """ + if optDict is None: + optDict = {} if outdoc is None: outdoc = ligolw.Document() outdoc.appendChild(ligolw.LIGO_LW()) diff --git a/pycbc/workflow/coincidence.py b/pycbc/workflow/coincidence.py index 169766f105e..7ea1bf6d6fa 100644 --- a/pycbc/workflow/coincidence.py +++ b/pycbc/workflow/coincidence.py @@ -57,7 +57,9 @@ class PyCBCFindCoincExecutable(Executable): """ Find 
coinc triggers using a folded interval method """ current_retention_level = Executable.ALL_TRIGGERS - def create_node(self, trig_files, bank_file, veto_file, veto_name, template_str, tags=[]): + def create_node(self, trig_files, bank_file, veto_file, veto_name, template_str, tags=None): + if tags is None: + tags = [] segs = trig_files.get_times_covered_by_files() seg = segments.segment(segs[0][0], segs[-1][1]) node = Node(self) @@ -75,7 +77,9 @@ class PyCBCStatMapExecutable(Executable): """ Calculate FAP, IFAR, etc """ current_retention_level = Executable.MERGED_TRIGGERS - def create_node(self, coinc_files, tags=[]): + def create_node(self, coinc_files, tags=None): + if tags is None: + tags = [] segs = coinc_files.get_times_covered_by_files() seg = segments.segment(segs[0][0], segs[-1][1]) @@ -89,7 +93,9 @@ class PyCBCStatMapInjExecutable(Executable): """ Calculate FAP, IFAR, etc """ current_retention_level = Executable.MERGED_TRIGGERS - def create_node(self, zerolag, full_data, injfull, fullinj, tags=[]): + def create_node(self, zerolag, full_data, injfull, fullinj, tags=None): + if tags is None: + tags = [] segs = zerolag.get_times_covered_by_files() seg = segments.segment(segs[0][0], segs[-1][1]) @@ -106,7 +112,9 @@ class PyCBCHDFInjFindExecutable(Executable): """ Find injections in the hdf files output """ current_retention_level = Executable.MERGED_TRIGGERS - def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=[]): + def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=None): + if tags is None: + tags = [] node = Node(self) node.add_input_list_opt('--trigger-file', inj_coinc_file) node.add_input_list_opt('--injection-file', inj_xml_file) @@ -120,7 +128,9 @@ def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=[ class PyCBCDistributeBackgroundBins(Executable): """ Distribute coinc files amoung different background bins """ current_retention_level = Executable.ALL_TRIGGERS - def 
create_node(self, coinc_files, bank_file, background_bins, tags=[]): + def create_node(self, coinc_files, bank_file, background_bins, tags=None): + if tags is None: + tags = [] node = Node(self) node.add_input_list_opt('--coinc-files', coinc_files) node.add_input_opt('--bank-file', bank_file) @@ -140,7 +150,9 @@ def create_node(self, coinc_files, bank_file, background_bins, tags=[]): class PyCBCCombineStatmap(Executable): current_retention_level = Executable.MERGED_TRIGGERS - def create_node(self, stat_files, tags=[]): + def create_node(self, stat_files, tags=None): + if tags is None: + tags = [] node = Node(self) node.add_input_list_opt('--statmap-files', stat_files) node.new_output_file_opt(stat_files[0].segment, '.hdf', '--output-file', tags=tags) @@ -165,7 +177,9 @@ def make_foreground_censored_veto(workflow, bg_file, veto_file, veto_name, workflow += node return node.output_files[0] -def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir, tags=[]): +def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir, tags=None): + if tags is None: + tags = [] make_analysis_dir(out_dir) out = FileList() for ifo in workflow.ifos: @@ -179,7 +193,9 @@ def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir, return out def find_injections_in_hdf_coinc(workflow, inj_coinc_file, inj_xml_file, - veto_file, veto_name, out_dir, tags=[]): + veto_file, veto_name, out_dir, tags=None): + if tags is None: + tags = [] make_analysis_dir(out_dir) exe = PyCBCHDFInjFindExecutable(workflow.cp, 'hdfinjfind', ifos=workflow.ifos, @@ -188,9 +204,11 @@ def find_injections_in_hdf_coinc(workflow, inj_coinc_file, inj_xml_file, workflow += node return node.output_files[0] -def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=[]): +def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=None): """Return the template bank in hdf format """ + if tags is None: + tags = [] #FIXME, make me not needed if len(xmlbank) > 1: raise 
ValueError('Can only convert a single template bank') @@ -204,9 +222,11 @@ def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=[]): workflow.add_node(bank2hdf_node) return bank2hdf_node.output_files -def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=[]): +def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=None): """Return the list of hdf5 trigger files outpus """ + if tags is None: + tags = [] #FIXME, make me not needed logging.info('convert single inspiral trigger files to hdf5') make_analysis_dir(out_dir) @@ -325,11 +345,13 @@ def setup_background_bins_inj(workflow, coinc_files, background_file, bank_file, return cstat_node.output_files[0] def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_files, - background_file, veto_file, veto_name, out_dir, tags=[]): + background_file, veto_file, veto_name, out_dir, tags=None): """ This function sets up exact match coincidence and background estimation using a folded interval technique. """ + if tags is None: + tags = [] make_analysis_dir(out_dir) logging.info('Setting up coincidence for injection') @@ -374,11 +396,13 @@ def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_f return setup_statmap_inj(workflow, bg_files, background_file, hdfbank, out_dir, tags=tags) def setup_interval_coinc(workflow, hdfbank, trig_files, - veto_files, veto_names, out_dir, tags=[]): + veto_files, veto_names, out_dir, tags=None): """ This function sets up exact match coincidence and background estimation using a folded interval technique. 
""" + if tags is None: + tags = [] make_analysis_dir(out_dir) logging.info('Setting up coincidence') diff --git a/pycbc/workflow/configuration.py b/pycbc/workflow/configuration.py index ab884d9b9b8..3daaaae8d75 100644 --- a/pycbc/workflow/configuration.py +++ b/pycbc/workflow/configuration.py @@ -138,7 +138,7 @@ class WorkflowConfigParser(glue.pipeline.DeepCopyableConfigParser): This is a sub-class of glue.pipeline.DeepCopyableConfigParser, which lets us add a few additional helper features that are useful in workflows. """ - def __init__(self, configFiles=[], overrideTuples=[], parsedFilePath=None): + def __init__(self, configFiles=None, overrideTuples=None, parsedFilePath=None): """ Initialize an WorkflowConfigParser. This reads the input configuration files, overrides values if necessary and performs the interpolation. @@ -160,6 +160,10 @@ def __init__(self, configFiles=[], overrideTuples=[], parsedFilePath=None): WorkflowConfigParser Initialized WorkflowConfigParser instance. """ + if configFiles is None: + configFiles = [] + if overrideTuples is None: + overrideTuples = [] glue.pipeline.DeepCopyableConfigParser.__init__(self) # Enable case sensitive options diff --git a/pycbc/workflow/core.py b/pycbc/workflow/core.py index 207bfb0c923..9b42085e001 100644 --- a/pycbc/workflow/core.py +++ b/pycbc/workflow/core.py @@ -123,7 +123,7 @@ class Executable(pegasus_workflow.Executable): # file will be retained, but a warning given current_retention_level = KEEP_BUT_RAISE_WARNING def __init__(self, cp, name, - universe=None, ifos=None, out_dir=None, tags=[]): + universe=None, ifos=None, out_dir=None, tags=None): """ Initialize the Executable class. @@ -146,6 +146,8 @@ def __init__(self, cp, name, tags : list of strings A list of strings that is used to identify this job. 
""" + if tags is None: + tags = [] tags = [tag.upper() for tag in tags] self.tags = tags if isinstance(ifos, (str, unicode)): @@ -559,7 +561,7 @@ def get_command_line(self): return [exe_path] + arglist - def new_output_file_opt(self, valid_seg, extension, option_name, tags=[], + def new_output_file_opt(self, valid_seg, extension, option_name, tags=None, store_file=None, use_tmp_subdirs=False): """ This function will create a workflow.File object corresponding to the given @@ -584,6 +586,8 @@ def new_output_file_opt(self, valid_seg, extension, option_name, tags=[], in the specified output location if True. If false file will be removed when no longer needed in the workflow. """ + if tags is None: + tags = [] # Changing this from set(tags) to enforce order. It might make sense # for all jobs to have file names with tags in the same order. @@ -634,7 +638,7 @@ def add_multiifo_output_list_opt(self, opt, outputs): self._add_output(outfile) def new_multiifo_output_list_opt(self, opt, ifos, analysis_time, extension, - tags=[], store_file=None, + tags=None, store_file=None, use_tmp_subdirs=False): """ Add an option that determines a list of outputs from multiple detectors. Files will be supplied as --opt ifo1:input1 ifo2:input2 @@ -642,6 +646,8 @@ def new_multiifo_output_list_opt(self, opt, ifos, analysis_time, extension, File names are created internally from the provided extension and analysis time. """ + if tags is None: + tags = [] all_tags = copy.deepcopy(self.executable.tags) for tag in tags: if tag not in all_tags: diff --git a/pycbc/workflow/gatefiles.py b/pycbc/workflow/gatefiles.py index 9c8cebf9550..588aa262183 100644 --- a/pycbc/workflow/gatefiles.py +++ b/pycbc/workflow/gatefiles.py @@ -36,7 +36,7 @@ from pycbc.workflow.core import File, FileList, make_analysis_dir, resolve_url def setup_gating_workflow(workflow, science_segs, datafind_outs, - output_dir=None, tags=[]): + output_dir=None, tags=None): ''' Setup gating section of CBC workflow. 
At present this only supports pregenerated gating files, in the future these could be created within the workflow. @@ -61,6 +61,8 @@ def setup_gating_workflow(workflow, science_segs, datafind_outs, gate_files : pycbc.workflow.core.FileList The FileList holding the gate files, 0 or 1 per ifo ''' + if tags is None: + tags = [] logging.info("Entering gating module.") make_analysis_dir(output_dir) cp = workflow.cp @@ -86,7 +88,7 @@ def setup_gating_workflow(workflow, science_segs, datafind_outs, return gate_files -def setup_gate_pregenerated(workflow, tags=[]): +def setup_gate_pregenerated(workflow, tags=None): ''' Setup CBC workflow to use pregenerated gating files. The file given in cp.get('workflow','pregenerated-gating-file-(ifo)') will @@ -105,6 +107,8 @@ def setup_gate_pregenerated(workflow, tags=[]): gate_files : pycbc.workflow.core.FileList The FileList holding the gating files ''' + if tags is None: + tags = [] gate_files = FileList([]) cp = workflow.cp diff --git a/pycbc/workflow/grb_utils.py b/pycbc/workflow/grb_utils.py index 2da3cc36836..dad785fa256 100644 --- a/pycbc/workflow/grb_utils.py +++ b/pycbc/workflow/grb_utils.py @@ -199,7 +199,7 @@ def make_exttrig_file(cp, ifos, sci_seg, out_dir): return xml_file -def get_ipn_sky_files(workflow, tags=[]): +def get_ipn_sky_files(workflow, tags=None): ''' Retreive the sky point files for searching over the IPN error box and populating it with injections. @@ -219,6 +219,8 @@ def get_ipn_sky_files(workflow, tags=[]): ipn_files : pycbc.workflow.core.FileList FileList holding the details of the IPN sky point files. 
''' + if tags is None: + tags = [] cp = workflow.cp ipn_search_points = cp.get("workflow-inspiral", "ipn-search-points") ipn_search_points = resolve_url(ipn_search_points) diff --git a/pycbc/workflow/injection.py b/pycbc/workflow/injection.py index 6d75c484bab..0e0bef7b420 100644 --- a/pycbc/workflow/injection.py +++ b/pycbc/workflow/injection.py @@ -77,7 +77,7 @@ def cut_distant_injections(workflow, inj_file, out_dir, tags=None): def setup_injection_workflow(workflow, output_dir=None, inj_section_name='injections', exttrig_file=None, - tags =[]): + tags =None): """ This function is the gateway for setting up injection-generation jobs in a workflow. It should be possible for this function to support a number @@ -109,6 +109,8 @@ def setup_injection_workflow(workflow, output_dir=None, identify them. The FileList class contains functions to search based on tags. """ + if tags is None: + tags = [] logging.info("Entering injection module.") make_analysis_dir(output_dir) diff --git a/pycbc/workflow/jobsetup.py b/pycbc/workflow/jobsetup.py index 99a027c0e9d..6d5d753c1bb 100644 --- a/pycbc/workflow/jobsetup.py +++ b/pycbc/workflow/jobsetup.py @@ -321,10 +321,12 @@ def sngl_ifo_job_setup(workflow, ifo, out_files, curr_exe_job, science_segs, def multi_ifo_coherent_job_setup(workflow, out_files, curr_exe_job, science_segs, datafind_outs, output_dir, - parents=None, tags=[]): + parents=None, tags=None): """ Method for setting up coherent inspiral jobs. 
""" + if tags is None: + tags = [] cp = workflow.cp ifos = science_segs.keys() job_tag = curr_exe_job.name.upper() @@ -718,7 +720,9 @@ class PyCBCInspiralExecutable(Executable): current_retention_level = Executable.ALL_TRIGGERS def __init__(self, cp, exe_name, ifo=None, out_dir=None, injection_file=None, - gate_files=None, tags=[]): + gate_files=None, tags=None): + if tags is None: + tags = [] super(PyCBCInspiralExecutable, self).__init__(cp, exe_name, None, ifo, out_dir, tags=tags) self.cp = cp self.set_memory(2000) @@ -743,7 +747,9 @@ def __init__(self, cp, exe_name, ifo=None, out_dir=None, injection_file=None, if len(stxt.split(':')) > 1: self.num_threads = stxt.split(':')[1] - def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=[]): + def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=None): + if tags is None: + tags = [] node = Node(self) pad_data = int(self.get_opt('pad-data')) if pad_data is None: @@ -906,14 +912,18 @@ class PyCBCTmpltbankExecutable(Executable): current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, exe_name, ifo=None, out_dir=None, - tags=[], write_psd=False, psd_files=None): + tags=None, write_psd=False, psd_files=None): + if tags is None: + tags = [] super(PyCBCTmpltbankExecutable, self).__init__(cp, exe_name, 'vanilla', ifo, out_dir, tags=tags) self.cp = cp self.set_memory(2000) self.write_psd = write_psd self.psd_files = psd_files - def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=[]): + def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=None): + if tags is None: + tags = [] node = Node(self) if not dfParents: @@ -941,7 +951,7 @@ def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=[]) node.add_input_list_opt('--frame-files', dfParents) return node - def create_nodata_node(self, valid_seg, tags=[]): + def create_nodata_node(self, valid_seg, tags=None): """ A simplified version of 
create_node that creates a node that does not need to read in data. @@ -956,6 +966,8 @@ def create_nodata_node(self, valid_seg, tags=[]): node : pycbc.workflow.core.Node The instance corresponding to the created node. """ + if tags is None: + tags = [] node = Node(self) # Set the output file @@ -1012,11 +1024,15 @@ class LigolwAddExecutable(Executable): """ The class used to create nodes for the ligolw_add Executable. """ current_retention_level = Executable.INTERMEDIATE_PRODUCT - def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=[]): + def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=None): + if tags is None: + tags = [] super(LigolwAddExecutable, self).__init__(cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory(2000) - def create_node(self, jobSegment, input_files, output=None, tags=[]): + def create_node(self, jobSegment, input_files, output=None, tags=None): + if tags is None: + tags = [] node = Node(self) # Very few options to ligolw_add, all input files are given as a long @@ -1044,14 +1060,18 @@ class LigolwSSthincaExecutable(Executable): current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - dqVetoName=None, tags=[]): + dqVetoName=None, tags=None): + if tags is None: + tags = [] super(LigolwSSthincaExecutable, self).__init__(cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory(2000) if dqVetoName: self.add_opt("--vetoes-name", dqVetoName) - def create_node(self, jobSegment, coincSegment, inputFile, tags=[], + def create_node(self, jobSegment, coincSegment, inputFile, tags=None, write_likelihood=False): + if tags is None: + tags = [] node = Node(self) node.add_input_arg(inputFile) @@ -1080,7 +1100,9 @@ class PycbcSqliteSimplifyExecutable(Executable): """ The class responsible for making jobs for pycbc_sqlite_simplify. 
""" current_retention_level = Executable.INTERMEDIATE_PRODUCT - def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=[]): + def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=None): + if tags is None: + tags = [] super(PycbcSqliteSimplifyExecutable, self).__init__(cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory(2000) @@ -1144,7 +1166,9 @@ class SQLInOutExecutable(Executable): one output. """ current_retention_level=Executable.ALL_TRIGGERS - def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=[]): + def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, tags=None): + if tags is None: + tags = [] super(SQLInOutExecutable, self).__init__(cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, input_file): @@ -1174,7 +1198,9 @@ class ExtractToXMLExecutable(Executable): """ current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, input_file): @@ -1203,7 +1229,9 @@ class InspinjfindExecutable(Executable): """ current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, input_file): @@ -1223,7 +1251,9 @@ def __init__(self, cp, exe_name, num_splits, universe=None, ifo=None, super(PycbcSplitInspinjExecutable, self).__init__(cp, exe_name, universe, ifo, out_dir, tags=[]) self.num_splits = int(num_splits) - def create_node(self, parent, tags=[]): + def create_node(self, parent, tags=None): + if tags is None: + tags = [] node = Node(self) node.add_input_opt('--input-file', 
parent) @@ -1255,7 +1285,9 @@ class PycbcPickleHorizonDistsExecutable(Executable): # FIXME: This class will soon be removed when gstlal post-proc is updated current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, trigger_files): @@ -1274,7 +1306,9 @@ class PycbcCombineLikelihoodExecutable(Executable): """ current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, likelihood_files, horizon_dist_file): @@ -1293,7 +1327,9 @@ class PycbcGenerateRankingDataExecutable(Executable): """ current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_num_cpus(4) @@ -1314,7 +1350,9 @@ class PycbcCalculateLikelihoodExecutable(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, trigger_file, likelihood_file, @@ -1340,7 +1378,9 @@ class GstlalMarginalizeLikelihoodExecutable(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def 
create_node(self, job_segment, input_file): @@ -1356,7 +1396,9 @@ class GstlalFarfromsnrchisqhistExecutable(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_node(self, job_segment, non_inj_db, marg_input_file, @@ -1382,7 +1424,9 @@ class GstlalPlotSensitivity(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory('4000') @@ -1399,7 +1443,9 @@ class GstlalPlotSummary(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory('4000') @@ -1416,7 +1462,9 @@ class GstlalPlotBackground(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) self.set_memory('4000') @@ -1432,7 +1480,9 @@ class GstlalSummaryPage(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifo=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifo, out_dir, tags=tags) def create_and_add_node(self, workflow, parent_nodes): @@ -1452,7 +1502,9 @@ class LalappsInspinjExecutable(Executable): The class used to create jobs for the lalapps_inspinj Executable. 
""" current_retention_level = Executable.FINAL_RESULT - def create_node(self, segment, exttrig_file=None, tags=[]): + def create_node(self, segment, exttrig_file=None, tags=None): + if tags is None: + tags = [] node = Node(self) curr_tags = self.tags + tags @@ -1509,14 +1561,18 @@ class PycbcDarkVsBrightInjectionsExecutable(Executable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, exe_name, universe=None, ifos=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifos, out_dir, tags=tags) self.cp = cp self.out_dir = out_dir self.exe_name = exe_name - def create_node(self, parent, segment, tags=[]): + def create_node(self, parent, segment, tags=None): + if tags is None: + tags = [] node = Node(self) if not parent: raise ValueError("Must provide an input file.") @@ -1547,14 +1603,18 @@ class LigolwCBCJitterSkylocExecutable(Executable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, exe_name, universe=None, ifos=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifos, out_dir, tags=tags) self.cp = cp self.out_dir = out_dir self.exe_name = exe_name - def create_node(self, parent, segment, tags=[]): + def create_node(self, parent, segment, tags=None): + if tags is None: + tags = [] if not parent: raise ValueError("Must provide an input file.") @@ -1573,14 +1633,18 @@ class LigolwCBCAlignTotalSpinExecutable(Executable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, exe_name, universe=None, ifos=None, out_dir=None, - tags=[]): + tags=None): + if tags is None: + tags = [] Executable.__init__(self, cp, exe_name, universe, ifos, out_dir, tags=tags) self.cp = cp self.out_dir = out_dir self.exe_name = exe_name - def create_node(self, parent, segment, tags=[]): + def create_node(self, parent, segment, tags=None): + if tags is None: + 
tags = [] if not parent: raise ValueError("Must provide an input file.") @@ -1614,7 +1678,7 @@ def __init__(self, cp, exe_name, num_banks, ifo, out_dir, tags=[]) self.num_banks = int(num_banks) - def create_node(self, bank, tags=[]): + def create_node(self, bank, tags=None): """ Set up a CondorDagmanNode class to run lalapps_splitbank code @@ -1628,6 +1692,8 @@ def create_node(self, bank, tags=[]): node : pycbc.workflow.core.Node The node to run the job """ + if tags is None: + tags = [] node = Node(self) node.add_input_opt('--bank-file', bank) diff --git a/pycbc/workflow/legacy_ihope.py b/pycbc/workflow/legacy_ihope.py index ef6732371b6..43466396441 100644 --- a/pycbc/workflow/legacy_ihope.py +++ b/pycbc/workflow/legacy_ihope.py @@ -104,10 +104,14 @@ class LegacyAnalysisExecutable(Executable): Executables. """ current_retention_level = Executable.MERGED_TRIGGERS - def __init__(self, cp, name, universe=None, ifo=None, tags=[], out_dir=None): + def __init__(self, cp, name, universe=None, ifo=None, tags=None, out_dir=None): + if tags is None: + tags = [] super(LegacyAnalysisExecutable, self).__init__(cp, name, universe, ifo, out_dir, tags=tags) - def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=[]): + def create_node(self, data_seg, valid_seg, parent=None, dfParents=None, tags=None): + if tags is None: + tags = [] node = LegacyAnalysisNode(self) if not dfParents: @@ -152,7 +156,9 @@ class LegacyCohPTFInspiralExecutable(LegacyAnalysisExecutable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - gate_files=None, out_dir=None, tags=[]): + gate_files=None, out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFInspiralExecutable, self).__init__(cp, name, universe, ifo, out_dir=out_dir, tags=tags) self.injection_file = injection_file @@ -161,7 +167,9 @@ def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, 
self.num_threads = 1 def create_node(self, data_seg, valid_seg, parent=None, inj_file=None, - dfParents=None, bankVetoBank=None, ipn_file=None, tags=[]): + dfParents=None, bankVetoBank=None, ipn_file=None, tags=None): + if tags is None: + tags = [] node = Node(self) if not dfParents: @@ -243,7 +251,9 @@ class LegacyCohPTFTrigCombiner(LegacyAnalysisExecutable): """ current_retention_level = Executable.INTERMEDIATE_PRODUCT def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFTrigCombiner, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.cp = cp @@ -251,7 +261,11 @@ def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, self.num_threads = 1 def create_node(self, trig_files=None, segment_dir=None, analysis_seg=None, - out_tags=[], tags=[]): + out_tags=None, tags=None): + if out_tags is None: + out_tags = [] + if tags is None: + tags = [] node = Node(self) if not trig_files: @@ -305,14 +319,18 @@ class LegacyCohPTFTrigCluster(LegacyAnalysisExecutable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFTrigCluster, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.cp = cp self.ifos = ifo self.num_threads = 1 - def create_node(self, parent, tags=[]): + def create_node(self, parent, tags=None): + if tags is None: + tags = [] node = Node(self) # Set input / output options @@ -338,14 +356,18 @@ class LegacyCohPTFInjfinder(LegacyAnalysisExecutable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFInjfinder, 
self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.cp = cp self.ifos = ifo self.num_threads = 1 - def create_node(self, trig_files, inj_files, seg_dir, tags=[]): + def create_node(self, trig_files, inj_files, seg_dir, tags=None): + if tags is None: + tags = [] node = Node(self) # Set input / output options @@ -378,7 +400,9 @@ class LegacyCohPTFInjcombiner(LegacyAnalysisExecutable): """ current_retention_level = Executable.MERGED_TRIGGERS def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFInjcombiner, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.ifos = ifo @@ -415,13 +439,17 @@ class LegacyCohPTFSbvPlotter(LegacyAnalysisExecutable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFSbvPlotter, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.ifos = ifo self.num_threads = 1 - def create_node(self, parent=None, seg_dir=None, inj_file=None, tags=[]): + def create_node(self, parent=None, seg_dir=None, inj_file=None, tags=None): + if tags is None: + tags = [] node = Node(self) if not parent: @@ -457,14 +485,18 @@ class LegacyCohPTFEfficiency(LegacyAnalysisExecutable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(LegacyCohPTFEfficiency, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.ifos = ifo self.num_threads = 1 def create_node(self, parent=None, offsource_file=None, seg_dir=None, - found_file=None, missed_file=None, tags=[]): + found_file=None, missed_file=None, tags=None): + if 
tags is None: + tags = [] node = Node(self) if not parent: @@ -519,7 +551,9 @@ class PyGRBMakeSummaryPage(LegacyAnalysisExecutable): """ current_retention_level = Executable.FINAL_RESULT def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, - out_dir=None, tags=[]): + out_dir=None, tags=None): + if tags is None: + tags = [] super(PyGRBMakeSummaryPage, self).__init__(cp, name, universe, ifo=ifo, out_dir=out_dir, tags=tags) self.ifos = ifo @@ -527,7 +561,9 @@ def __init__(self, cp, name, universe=None, ifo=None, injection_file=None, def create_node(self, parent=None, c_file=None, open_box=False, seg_plot=None, tuning_tags=None, exclusion_tags=None, - html_dir=None, tags=[]): + html_dir=None, tags=None): + if tags is None: + tags = [] node = Node(self) node.add_opt('--grb-name', self.cp.get('workflow', 'trigger-name')) diff --git a/pycbc/workflow/matched_filter.py b/pycbc/workflow/matched_filter.py index 7ccec8c44d6..efb391e7ad8 100644 --- a/pycbc/workflow/matched_filter.py +++ b/pycbc/workflow/matched_filter.py @@ -38,7 +38,7 @@ def setup_matchedfltr_workflow(workflow, science_segs, datafind_outs, tmplt_banks, output_dir=None, - injection_file=None, gate_files=None, tags=[]): + injection_file=None, gate_files=None, tags=None): ''' This function aims to be the gateway for setting up a set of matched-filter jobs in a workflow. This function is intended to support multiple @@ -78,6 +78,8 @@ def setup_matchedfltr_workflow(workflow, science_segs, datafind_outs, If you require access to any intermediate products produced at this stage you can call the various sub-functions directly. 
''' + if tags is None: + tags = [] logging.info("Entering matched-filtering setup module.") make_analysis_dir(output_dir) cp = workflow.cp @@ -137,7 +139,7 @@ def setup_matchedfltr_workflow(workflow, science_segs, datafind_outs, def setup_matchedfltr_dax_generated(workflow, science_segs, datafind_outs, tmplt_banks, output_dir, injection_file=None, gate_files=None, - tags=[], link_to_tmpltbank=False, + tags=None, link_to_tmpltbank=False, compatibility_mode=False): ''' Setup matched-filter jobs that are generated as part of the workflow. @@ -181,6 +183,8 @@ def setup_matchedfltr_dax_generated(workflow, science_segs, datafind_outs, If you require access to any intermediate products produced at this stage you can call the various sub-functions directly. ''' + if tags is None: + tags = [] # Need to get the exe to figure out what sections are analysed, what is # discarded etc. This should *not* be hardcoded, so using a new executable # will require a bit of effort here .... @@ -234,7 +238,7 @@ def setup_matchedfltr_dax_generated(workflow, science_segs, datafind_outs, def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs, tmplt_banks, output_dir, injection_file=None, gate_files=None, - tags=[], link_to_tmpltbank=False, + tags=None, link_to_tmpltbank=False, compatibility_mode=False): ''' Setup matched-filter jobs that are generated as part of the workflow in @@ -275,6 +279,8 @@ def setup_matchedfltr_dax_generated_multi(workflow, science_segs, datafind_outs, If you require access to any intermediate products produced at this stage you can call the various sub-functions directly. ''' + if tags is None: + tags = [] # Need to get the exe to figure out what sections are analysed, what is # discarded etc. This should *not* be hardcoded, so using a new executable # will require a bit of effort here .... 
diff --git a/pycbc/workflow/plotting.py b/pycbc/workflow/plotting.py index 179565470a6..9c0338e27e8 100644 --- a/pycbc/workflow/plotting.py +++ b/pycbc/workflow/plotting.py @@ -98,7 +98,9 @@ def make_spectrum_plot(workflow, psd_files, out_dir, tags=None, precalc_psd_file workflow += node return node.output_files[0] -def make_segments_plot(workflow, seg_files, out_dir, tags=[]): +def make_segments_plot(workflow, seg_files, out_dir, tags=None): + if tags is None: + tags = [] makedir(out_dir) node = PlotExecutable(workflow.cp, 'plot_segments', ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() @@ -106,7 +108,9 @@ def make_segments_plot(workflow, seg_files, out_dir, tags=[]): node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file') workflow += node -def make_gating_plot(workflow, insp_files, out_dir, tags=[]): +def make_gating_plot(workflow, insp_files, out_dir, tags=None): + if tags is None: + tags = [] makedir(out_dir) node = PlotExecutable(workflow.cp, 'plot_gating', ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() @@ -130,7 +134,9 @@ def make_foreground_table(workflow, trig_file, bank_file, ftag, out_dir, workflow += node return node.output_files[0] -def make_sensitivity_plot(workflow, inj_file, out_dir, exclude=None, require=None, tags=[]): +def make_sensitivity_plot(workflow, inj_file, out_dir, exclude=None, require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_sensitivity'), require) secs = excludestr(secs, exclude) @@ -145,7 +151,9 @@ def make_sensitivity_plot(workflow, inj_file, out_dir, exclude=None, require=Non return files def make_coinc_snrchi_plot(workflow, inj_file, inj_trig, stat_file, trig_file, out_dir, - exclude=None, require=None, tags=[]): + exclude=None, require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_coinc_snrchi'), require) secs = excludestr(secs, exclude) 
@@ -162,7 +170,9 @@ def make_coinc_snrchi_plot(workflow, inj_file, inj_trig, stat_file, trig_file, o files += node.output_files return files -def make_inj_table(workflow, inj_file, out_dir, missed=False, singles=None, tags=[]): +def make_inj_table(workflow, inj_file, out_dir, missed=False, singles=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) node = PlotExecutable(workflow.cp, 'page_injections', ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() @@ -259,7 +269,9 @@ def make_ifar_plot(workflow, trigger_file, out_dir, tags=None): return node.output_files[0] def make_snrchi_plot(workflow, trig_files, veto_file, veto_name, - out_dir, exclude=None, require=None, tags=[]): + out_dir, exclude=None, require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_snrchi'), require) secs = excludestr(secs, exclude) @@ -280,7 +292,9 @@ def make_snrchi_plot(workflow, trig_files, veto_file, veto_name, files += node.output_files return files -def make_foundmissed_plot(workflow, inj_file, out_dir, exclude=None, require=None, tags=[]): +def make_foundmissed_plot(workflow, inj_file, out_dir, exclude=None, require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_foundmissed'), require) secs = excludestr(secs, exclude) @@ -296,7 +310,9 @@ def make_foundmissed_plot(workflow, inj_file, out_dir, exclude=None, require=Non files += node.output_files return files -def make_snrratehist_plot(workflow, bg_file, out_dir, closed_box=False, tags=[]): +def make_snrratehist_plot(workflow, bg_file, out_dir, closed_box=False, tags=None): + if tags is None: + tags = [] makedir(out_dir) node = PlotExecutable(workflow.cp, 'plot_snrratehist', ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() @@ -310,7 +326,9 @@ def make_snrratehist_plot(workflow, bg_file, out_dir, closed_box=False, tags=[]) return node.output_files[0] -def 
make_snrifar_plot(workflow, bg_file, out_dir, closed_box=False, cumulative=True, tags=[]): +def make_snrifar_plot(workflow, bg_file, out_dir, closed_box=False, cumulative=True, tags=None): + if tags is None: + tags = [] makedir(out_dir) node = PlotExecutable(workflow.cp, 'plot_snrifar', ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() @@ -339,7 +357,9 @@ def make_results_web_page(workflow, results_dir): def make_single_hist(workflow, trig_file, veto_file, veto_name, out_dir, bank_file=None, exclude=None, - require=None, tags=[]): + require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_hist'), require) secs = excludestr(secs, exclude) @@ -361,7 +381,9 @@ def make_single_hist(workflow, trig_file, veto_file, veto_name, return files def make_singles_plot(workflow, trig_files, bank_file, veto_file, veto_name, - out_dir, exclude=None, require=None, tags=[]): + out_dir, exclude=None, require=None, tags=None): + if tags is None: + tags = [] makedir(out_dir) secs = requirestr(workflow.cp.get_subsections('plot_singles'), require) secs = excludestr(secs, exclude) diff --git a/pycbc/workflow/postprocessing_cohptf.py b/pycbc/workflow/postprocessing_cohptf.py index a68d575b36e..110bf0a0056 100644 --- a/pycbc/workflow/postprocessing_cohptf.py +++ b/pycbc/workflow/postprocessing_cohptf.py @@ -43,7 +43,7 @@ def setup_coh_PTF_post_processing(workflow, trigger_files, trigger_cache, output_dir, segment_dir, injection_trigger_files=None, injection_files=None, injection_trigger_caches=None, injection_caches=None, config_file=None, run_dir=None, ifos=None, - web_dir=None, segments_plot=None, inj_tags=[], tags=[], **kwargs): + web_dir=None, segments_plot=None, inj_tags=None, tags=None, **kwargs): """ This function aims to be the gateway for running postprocessing in CBC offline workflows. 
Post-processing generally consists of calculating the @@ -72,6 +72,10 @@ def setup_coh_PTF_post_processing(workflow, trigger_files, trigger_cache, A list of the output from this stage. """ + if inj_tags is None: + inj_tags = [] + if tags is None: + tags = [] logging.info("Entering post-processing stage.") make_analysis_dir(output_dir) @@ -103,7 +107,7 @@ def setup_postproc_coh_PTF_workflow(workflow, trig_files, trig_cache, inj_trig_files, inj_files, inj_trig_caches, inj_caches, config_file, output_dir, html_dir, segment_dir, segs_plot, ifos, - inj_tags=[], tags=[]): + inj_tags=None, tags=None): """ This module sets up the post-processing stage in the workflow, using a coh_PTF style set up. This consists of running trig_combiner to find @@ -121,6 +125,10 @@ def setup_postproc_coh_PTF_workflow(workflow, trig_files, trig_cache, -------- """ + if inj_tags is None: + inj_tags = [] + if tags is None: + tags = [] cp = workflow.cp full_segment = trig_files[0].segment trig_name = cp.get("workflow", "trigger-name") diff --git a/pycbc/workflow/psdfiles.py b/pycbc/workflow/psdfiles.py index 260978ccffc..587de15d6c1 100644 --- a/pycbc/workflow/psdfiles.py +++ b/pycbc/workflow/psdfiles.py @@ -36,7 +36,7 @@ from pycbc.workflow.core import File, FileList, make_analysis_dir, resolve_url def setup_psd_workflow(workflow, science_segs, datafind_outs, - output_dir=None, tags=[]): + output_dir=None, tags=None): ''' Setup static psd section of CBC workflow. At present this only supports pregenerated psd files, in the future these could be created within the workflow. 
@@ -61,6 +61,8 @@ def setup_psd_workflow(workflow, science_segs, datafind_outs, psd_files : pycbc.workflow.core.FileList The FileList holding the psd files, 0 or 1 per ifo ''' + if tags is None: + tags = [] logging.info("Entering static psd module.") make_analysis_dir(output_dir) cp = workflow.cp @@ -86,7 +88,7 @@ def setup_psd_workflow(workflow, science_segs, datafind_outs, return psd_files -def setup_psd_pregenerated(workflow, tags=[]): +def setup_psd_pregenerated(workflow, tags=None): ''' Setup CBC workflow to use pregenerated psd files. The file given in cp.get('workflow','pregenerated-psd-file-(ifo)') will @@ -106,6 +108,8 @@ def setup_psd_pregenerated(workflow, tags=[]): psd_files : pycbc.workflow.core.FileList The FileList holding the gating files ''' + if tags is None: + tags = [] psd_files = FileList([]) cp = workflow.cp diff --git a/pycbc/workflow/segment.py b/pycbc/workflow/segment.py index aee364fe3cc..6c646a405f5 100644 --- a/pycbc/workflow/segment.py +++ b/pycbc/workflow/segment.py @@ -911,7 +911,7 @@ def get_cumulative_segs(workflow, categories, seg_files_list, out_dir, return outfile def add_cumulative_files(workflow, output_file, input_files, out_dir, - execute_now=False, tags=[]): + execute_now=False, tags=None): """ Function to combine a set of segment files into a single one. This function will not merge the segment lists but keep each separate. 
@@ -932,6 +932,8 @@ def add_cumulative_files(workflow, output_file, input_files, out_dir, tags : list of strings, optional A list of strings that is used to identify this job """ + if tags is None: + tags = [] llwadd_job = LigolwAddExecutable(workflow.cp, 'llwadd', ifo=output_file.ifo_list, out_dir=out_dir, tags=tags) add_node = llwadd_job.create_node(output_file.segment, input_files, diff --git a/pycbc/workflow/splittable.py b/pycbc/workflow/splittable.py index 9ad7bd08192..29de746ad00 100644 --- a/pycbc/workflow/splittable.py +++ b/pycbc/workflow/splittable.py @@ -68,7 +68,7 @@ def select_splitfilejob_instance(curr_exe): return exe_class -def setup_splittable_workflow(workflow, input_tables, out_dir=None, tags=[]): +def setup_splittable_workflow(workflow, input_tables, out_dir=None, tags=None): ''' This function aims to be the gateway for code that is responsible for taking some input file containing some table, and splitting into multiple files @@ -90,6 +90,8 @@ def setup_splittable_workflow(workflow, input_tables, out_dir=None, tags=[]): split_table_outs : pycbc.workflow.core.FileList The list of split up files as output from this job. ''' + if tags is None: + tags = [] logging.info("Entering split output files module.") make_analysis_dir(out_dir) # Parse for options in .ini file diff --git a/pycbc/workflow/tmpltbank.py b/pycbc/workflow/tmpltbank.py index 09416cb8d03..f1ba0782c3f 100644 --- a/pycbc/workflow/tmpltbank.py +++ b/pycbc/workflow/tmpltbank.py @@ -38,7 +38,7 @@ from pycbc.workflow.jobsetup import select_tmpltbank_class, select_matchedfilter_class, sngl_ifo_job_setup def setup_tmpltbank_workflow(workflow, science_segs, datafind_outs, - output_dir=None, psd_files=None, tags=[]): + output_dir=None, psd_files=None, tags=None): ''' Setup template bank section of CBC workflow. 
This function is responsible for deciding which of the various template bank workflow generation @@ -66,6 +66,8 @@ def setup_tmpltbank_workflow(workflow, science_segs, datafind_outs, tmplt_banks : pycbc.workflow.core.FileList The FileList holding the details of all the template bank jobs. ''' + if tags is None: + tags = [] logging.info("Entering template bank generation module.") make_analysis_dir(output_dir) cp = workflow.cp @@ -131,7 +133,7 @@ def setup_tmpltbank_workflow(workflow, science_segs, datafind_outs, return tmplt_banks def setup_tmpltbank_dax_generated(workflow, science_segs, datafind_outs, - output_dir, tags=[], + output_dir, tags=None, link_to_matchedfltr=True, compatibility_mode=False, psd_files=None): @@ -171,6 +173,8 @@ def setup_tmpltbank_dax_generated(workflow, science_segs, datafind_outs, tmplt_banks : pycbc.workflow.core.FileList The FileList holding the details of all the template bank jobs. ''' + if tags is None: + tags = [] cp = workflow.cp # Need to get the exe to figure out what sections are analysed, what is # discarded etc. This should *not* be hardcoded, so using a new executable @@ -228,7 +232,7 @@ def setup_tmpltbank_dax_generated(workflow, science_segs, datafind_outs, return tmplt_banks def setup_tmpltbank_without_frames(workflow, output_dir, - tags=[], independent_ifos=False, + tags=None, independent_ifos=False, psd_files=None): ''' Setup CBC workflow to use a template bank (or banks) that are generated in @@ -257,6 +261,8 @@ def setup_tmpltbank_without_frames(workflow, output_dir, tmplt_banks : pycbc.workflow.core.FileList The FileList holding the details of the template bank(s). ''' + if tags is None: + tags = [] cp = workflow.cp # Need to get the exe to figure out what sections are analysed, what is # discarded etc. 
This should *not* be hardcoded, so using a new executable @@ -300,7 +306,7 @@ def setup_tmpltbank_without_frames(workflow, output_dir, return tmplt_banks -def setup_tmpltbank_pregenerated(workflow, tags=[]): +def setup_tmpltbank_pregenerated(workflow, tags=None): ''' Setup CBC workflow to use a pregenerated template bank. The bank given in cp.get('workflow','pregenerated-template-bank') will be used @@ -321,6 +327,8 @@ def setup_tmpltbank_pregenerated(workflow, tags=[]): tmplt_banks : pycbc.workflow.core.FileList The FileList holding the details of the template bank. ''' + if tags is None: + tags = [] # Currently this uses the *same* fixed bank for all ifos. # Maybe we want to add capability to analyse separate banks in all ifos? diff --git a/test/fft_base.py b/test/fft_base.py index e41eec82e24..4ad55780027 100644 --- a/test/fft_base.py +++ b/test/fft_base.py @@ -301,7 +301,7 @@ def _test_lal_tf_ifft(test_case,inarr,outarr,tol): emsg = "Direct call to LAL TimeFreqFFT() did not agree with fft() to within precision {0}".format(tol) tc.assertTrue(outarr.almost_equal_norm(cmparr,tol=tol),msg=emsg) -def _test_raise_excep_fft(test_case,inarr,outarr,other_args={}): +def _test_raise_excep_fft(test_case,inarr,outarr,other_args=None): # As far as can be told from the unittest module documentation, the # 'assertRaises' tests do not permit a custom message. So more # comments than usual here, to help diagnose and test failures. @@ -310,6 +310,8 @@ def _test_raise_excep_fft(test_case,inarr,outarr,other_args={}): # the constructors of some types (T/F series); we cannot simply copy since # the whole point is to vary the input/output in some way that should cause # an exception. 
+ if other_args is None: + other_args = {} tc = test_case with tc.context: outty = type(outarr) @@ -337,7 +339,7 @@ def _test_raise_excep_fft(test_case,inarr,outarr,other_args={}): args = [in_badtype,outarr,tc.backends] tc.assertRaises(TypeError,pycbc.fft.fft,*args) -def _test_raise_excep_ifft(test_case,inarr,outarr,other_args={}): +def _test_raise_excep_ifft(test_case,inarr,outarr,other_args=None): # As far as can be told from the unittest module documentation, the # 'assertRaises' tests do not permit a custom message. So more # comments than usual here, to help diagnose and test failures. @@ -346,6 +348,8 @@ def _test_raise_excep_ifft(test_case,inarr,outarr,other_args={}): # the constructors of some types (T/F series); we cannot simply copy since # the whole point is to vary the input/output in some way that should cause # an exception. + if other_args is None: + other_args = {} tc = test_case with tc.context: outty = type(outarr) diff --git a/tools/static/runtime-scipy.py b/tools/static/runtime-scipy.py index 64320d14328..41a4f847135 100644 --- a/tools/static/runtime-scipy.py +++ b/tools/static/runtime-scipy.py @@ -23,17 +23,21 @@ def find(pattern, path): # add the libaries to the include and library directories import scipy.weave.inline_tools scipy.weave.inline_tools.inline_real = scipy.weave.inline_tools.inline -def inline(code,arg_names=[],local_dict=None, global_dict=None, +def inline(code,arg_names=None,local_dict=None, global_dict=None, force=0, compiler='', verbose=0, support_code=None, - headers=[], + headers=None, customize=None, type_converters=None, auto_downcast=1, newarr_converter=0, **kw): + if arg_names is None: + arg_names = [] + if headers is None: + headers = [] kw['library_dirs'] = [basedir] kw['include_dirs'] = [os.path.join(basedir, 'include')] call_frame = sys._getframe().f_back