Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix "Avoid mutable default arguments" issue #2

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .landscape.yml
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
doc-warnings: true
test-warnings: no
strictness: medium
strictness: high
autodetect: yes
ignore-patterns:
- */__init__.py
- examples/*
- test/*
- docs/*
Expand Down
4 changes: 3 additions & 1 deletion pycbc/fft/fft_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,9 +199,11 @@ def compile(source, name):
fhash.write(str(hash(source)))
return lib_file

def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=[]):
def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=None):
""" Get the IFFT execute and plan functions
"""
if parameters is None:
parameters = []
source = fftsrc.render(input_callback=callback, output_callback=out_callback, parameters=parameters)
path = compile(source, name)
lib = ctypes.cdll.LoadLibrary(path)
Expand Down
4 changes: 3 additions & 1 deletion pycbc/io/record.py
Original file line number Diff line number Diff line change
Expand Up @@ -1146,12 +1146,14 @@ def default_fields(cls, include_virtual=True, **kwargs):
return dict(cls._staticfields.items() + add_fields.items())


def __new__(cls, shape, name=None, additional_fields=None, field_kwargs={},
def __new__(cls, shape, name=None, additional_fields=None, field_kwargs=None,
**kwargs):
"""The ``additional_fields`` should be specified in the same way as
``dtype`` is normally given to FieldArray. The ``field_kwargs`` are
passed to the class's default_fields method as keyword arguments.
"""
if field_kwargs is None:
field_kwargs = {}
if 'names' in kwargs and 'dtype' in kwargs:
raise ValueError("Please provide names or dtype, not both")
default_fields = cls.default_fields(include_virtual=False,
Expand Down
4 changes: 3 additions & 1 deletion pycbc/results/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ def load_png_metadata(filename):
'.html':load_html_metadata,
}

def save_fig_with_metadata(fig, filename, fig_kwds={}, **kwds):
def save_fig_with_metadata(fig, filename, fig_kwds=None, **kwds):
    """ Save plot to file with metadata included. Keywords translate to metadata
that is stored directly in the plot file. Limited format types available.

Expand All @@ -105,6 +105,8 @@ def save_fig_with_metadata(fig, filename, fig_kwds={}, **kwds):
filename: str
Name of file to store the plot.
"""
if fig_kwds is None:
fig_kwds = {}
try:
extension = os.path.splitext(filename)[1]
kwds['version'] = pycbc.version.git_verbose_msg
Expand Down
8 changes: 6 additions & 2 deletions pycbc/tmpltbank/bank_output_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def return_empty_sngl():
return sngl

def return_search_summary(start_time=0, end_time=0, nevents=0,
ifos=[], **kwargs):
ifos=None, **kwargs):
"""
Function to create a SearchSummary object where all columns are populated
but all are set to values that test False (ie. strings to '', floats/ints
Expand All @@ -59,6 +59,8 @@ def return_search_summary(start_time=0, end_time=0, nevents=0,
    lsctables.SearchSummary
The "empty" SearchSummary object.
"""
if ifos is None:
ifos = []

# create an empty search summary
search_summary = lsctables.SearchSummary()
Expand Down Expand Up @@ -283,7 +285,7 @@ def calculate_ethinca_metric_comps(metricParams, ethincaParams, mass1, mass2,
return fMax_theor, gammaVals

def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
ethincaParams, programName="", optDict = {},
ethincaParams, programName="", optDict = None,
outdoc=None, **kwargs):
"""
Function that converts the information produced by the various pyCBC bank
Expand Down Expand Up @@ -317,6 +319,8 @@ def output_sngl_inspiral_table(outputFile, tempBank, metricParams,
All other key word arguments will be passed directly to
ligolw_process.register_to_xmldoc
"""
if optDict is None:
optDict = {}
if outdoc is None:
outdoc = ligolw.Document()
outdoc.appendChild(ligolw.LIGO_LW())
Expand Down
48 changes: 36 additions & 12 deletions pycbc/workflow/coincidence.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@ class PyCBCFindCoincExecutable(Executable):
""" Find coinc triggers using a folded interval method
"""
current_retention_level = Executable.ALL_TRIGGERS
def create_node(self, trig_files, bank_file, veto_file, veto_name, template_str, tags=[]):
def create_node(self, trig_files, bank_file, veto_file, veto_name, template_str, tags=None):
if tags is None:
tags = []
segs = trig_files.get_times_covered_by_files()
seg = segments.segment(segs[0][0], segs[-1][1])
node = Node(self)
Expand All @@ -75,7 +77,9 @@ class PyCBCStatMapExecutable(Executable):
""" Calculate FAP, IFAR, etc
"""
current_retention_level = Executable.MERGED_TRIGGERS
def create_node(self, coinc_files, tags=[]):
def create_node(self, coinc_files, tags=None):
if tags is None:
tags = []
segs = coinc_files.get_times_covered_by_files()
seg = segments.segment(segs[0][0], segs[-1][1])

Expand All @@ -89,7 +93,9 @@ class PyCBCStatMapInjExecutable(Executable):
""" Calculate FAP, IFAR, etc
"""
current_retention_level = Executable.MERGED_TRIGGERS
def create_node(self, zerolag, full_data, injfull, fullinj, tags=[]):
def create_node(self, zerolag, full_data, injfull, fullinj, tags=None):
if tags is None:
tags = []
segs = zerolag.get_times_covered_by_files()
seg = segments.segment(segs[0][0], segs[-1][1])

Expand All @@ -106,7 +112,9 @@ class PyCBCHDFInjFindExecutable(Executable):
""" Find injections in the hdf files output
"""
current_retention_level = Executable.MERGED_TRIGGERS
def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=[]):
def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=None):
if tags is None:
tags = []
node = Node(self)
node.add_input_list_opt('--trigger-file', inj_coinc_file)
node.add_input_list_opt('--injection-file', inj_xml_file)
Expand All @@ -120,7 +128,9 @@ def create_node(self, inj_coinc_file, inj_xml_file, veto_file, veto_name, tags=[
class PyCBCDistributeBackgroundBins(Executable):
    """ Distribute coinc files among different background bins """
current_retention_level = Executable.ALL_TRIGGERS
def create_node(self, coinc_files, bank_file, background_bins, tags=[]):
def create_node(self, coinc_files, bank_file, background_bins, tags=None):
if tags is None:
tags = []
node = Node(self)
node.add_input_list_opt('--coinc-files', coinc_files)
node.add_input_opt('--bank-file', bank_file)
Expand All @@ -140,7 +150,9 @@ def create_node(self, coinc_files, bank_file, background_bins, tags=[]):

class PyCBCCombineStatmap(Executable):
current_retention_level = Executable.MERGED_TRIGGERS
def create_node(self, stat_files, tags=[]):
def create_node(self, stat_files, tags=None):
if tags is None:
tags = []
node = Node(self)
node.add_input_list_opt('--statmap-files', stat_files)
node.new_output_file_opt(stat_files[0].segment, '.hdf', '--output-file', tags=tags)
Expand All @@ -165,7 +177,9 @@ def make_foreground_censored_veto(workflow, bg_file, veto_file, veto_name,
workflow += node
return node.output_files[0]

def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir, tags=[]):
def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir, tags=None):
if tags is None:
tags = []
make_analysis_dir(out_dir)
out = FileList()
for ifo in workflow.ifos:
Expand All @@ -179,7 +193,9 @@ def merge_single_detector_hdf_files(workflow, bank_file, trigger_files, out_dir,
return out

def find_injections_in_hdf_coinc(workflow, inj_coinc_file, inj_xml_file,
veto_file, veto_name, out_dir, tags=[]):
veto_file, veto_name, out_dir, tags=None):
if tags is None:
tags = []
make_analysis_dir(out_dir)
exe = PyCBCHDFInjFindExecutable(workflow.cp, 'hdfinjfind',
ifos=workflow.ifos,
Expand All @@ -188,9 +204,11 @@ def find_injections_in_hdf_coinc(workflow, inj_coinc_file, inj_xml_file,
workflow += node
return node.output_files[0]

def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=[]):
def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=None):
"""Return the template bank in hdf format
"""
if tags is None:
tags = []
#FIXME, make me not needed
if len(xmlbank) > 1:
raise ValueError('Can only convert a single template bank')
Expand All @@ -204,9 +222,11 @@ def convert_bank_to_hdf(workflow, xmlbank, out_dir, tags=[]):
workflow.add_node(bank2hdf_node)
return bank2hdf_node.output_files

def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=[]):
def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=None):
    """Return the list of hdf5 trigger file outputs
"""
if tags is None:
tags = []
#FIXME, make me not needed
logging.info('convert single inspiral trigger files to hdf5')
make_analysis_dir(out_dir)
Expand Down Expand Up @@ -325,11 +345,13 @@ def setup_background_bins_inj(workflow, coinc_files, background_file, bank_file,
return cstat_node.output_files[0]

def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_files,
background_file, veto_file, veto_name, out_dir, tags=[]):
background_file, veto_file, veto_name, out_dir, tags=None):
"""
This function sets up exact match coincidence and background estimation
using a folded interval technique.
"""
if tags is None:
tags = []
make_analysis_dir(out_dir)
logging.info('Setting up coincidence for injection')

Expand Down Expand Up @@ -374,11 +396,13 @@ def setup_interval_coinc_inj(workflow, hdfbank, full_data_trig_files, inj_trig_f
return setup_statmap_inj(workflow, bg_files, background_file, hdfbank, out_dir, tags=tags)

def setup_interval_coinc(workflow, hdfbank, trig_files,
veto_files, veto_names, out_dir, tags=[]):
veto_files, veto_names, out_dir, tags=None):
"""
This function sets up exact match coincidence and background estimation
using a folded interval technique.
"""
if tags is None:
tags = []
make_analysis_dir(out_dir)
logging.info('Setting up coincidence')

Expand Down
6 changes: 5 additions & 1 deletion pycbc/workflow/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ class WorkflowConfigParser(glue.pipeline.DeepCopyableConfigParser):
This is a sub-class of glue.pipeline.DeepCopyableConfigParser, which lets
us add a few additional helper features that are useful in workflows.
"""
def __init__(self, configFiles=[], overrideTuples=[], parsedFilePath=None):
def __init__(self, configFiles=None, overrideTuples=None, parsedFilePath=None):
"""
Initialize an WorkflowConfigParser. This reads the input configuration
files, overrides values if necessary and performs the interpolation.
Expand All @@ -160,6 +160,10 @@ def __init__(self, configFiles=[], overrideTuples=[], parsedFilePath=None):
WorkflowConfigParser
Initialized WorkflowConfigParser instance.
"""
if configFiles is None:
configFiles = []
if overrideTuples is None:
overrideTuples = []
glue.pipeline.DeepCopyableConfigParser.__init__(self)

# Enable case sensitive options
Expand Down
12 changes: 9 additions & 3 deletions pycbc/workflow/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ class Executable(pegasus_workflow.Executable):
# file will be retained, but a warning given
current_retention_level = KEEP_BUT_RAISE_WARNING
def __init__(self, cp, name,
universe=None, ifos=None, out_dir=None, tags=[]):
universe=None, ifos=None, out_dir=None, tags=None):
"""
Initialize the Executable class.

Expand All @@ -146,6 +146,8 @@ def __init__(self, cp, name,
tags : list of strings
A list of strings that is used to identify this job.
"""
if tags is None:
tags = []
tags = [tag.upper() for tag in tags]
self.tags = tags
if isinstance(ifos, (str, unicode)):
Expand Down Expand Up @@ -559,7 +561,7 @@ def get_command_line(self):

return [exe_path] + arglist

def new_output_file_opt(self, valid_seg, extension, option_name, tags=[],
def new_output_file_opt(self, valid_seg, extension, option_name, tags=None,
store_file=None, use_tmp_subdirs=False):
"""
This function will create a workflow.File object corresponding to the given
Expand All @@ -584,6 +586,8 @@ def new_output_file_opt(self, valid_seg, extension, option_name, tags=[],
in the specified output location if True. If false file will be
removed when no longer needed in the workflow.
"""
if tags is None:
tags = []

# Changing this from set(tags) to enforce order. It might make sense
# for all jobs to have file names with tags in the same order.
Expand Down Expand Up @@ -634,14 +638,16 @@ def add_multiifo_output_list_opt(self, opt, outputs):
self._add_output(outfile)

def new_multiifo_output_list_opt(self, opt, ifos, analysis_time, extension,
tags=[], store_file=None,
tags=None, store_file=None,
use_tmp_subdirs=False):
""" Add an option that determines a list of outputs from multiple
detectors. Files will be supplied as --opt ifo1:input1 ifo2:input2
.....
File names are created internally from the provided extension and
analysis time.
"""
if tags is None:
tags = []
all_tags = copy.deepcopy(self.executable.tags)
for tag in tags:
if tag not in all_tags:
Expand Down
8 changes: 6 additions & 2 deletions pycbc/workflow/gatefiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
from pycbc.workflow.core import File, FileList, make_analysis_dir, resolve_url

def setup_gating_workflow(workflow, science_segs, datafind_outs,
output_dir=None, tags=[]):
output_dir=None, tags=None):
'''
Setup gating section of CBC workflow. At present this only supports pregenerated
gating files, in the future these could be created within the workflow.
Expand All @@ -61,6 +61,8 @@ def setup_gating_workflow(workflow, science_segs, datafind_outs,
gate_files : pycbc.workflow.core.FileList
The FileList holding the gate files, 0 or 1 per ifo
'''
if tags is None:
tags = []
logging.info("Entering gating module.")
make_analysis_dir(output_dir)
cp = workflow.cp
Expand All @@ -86,7 +88,7 @@ def setup_gating_workflow(workflow, science_segs, datafind_outs,
return gate_files


def setup_gate_pregenerated(workflow, tags=[]):
def setup_gate_pregenerated(workflow, tags=None):
'''
Setup CBC workflow to use pregenerated gating files.
The file given in cp.get('workflow','pregenerated-gating-file-(ifo)') will
Expand All @@ -105,6 +107,8 @@ def setup_gate_pregenerated(workflow, tags=[]):
gate_files : pycbc.workflow.core.FileList
The FileList holding the gating files
'''
if tags is None:
tags = []
gate_files = FileList([])

cp = workflow.cp
Expand Down
4 changes: 3 additions & 1 deletion pycbc/workflow/grb_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def make_exttrig_file(cp, ifos, sci_seg, out_dir):
return xml_file


def get_ipn_sky_files(workflow, tags=[]):
def get_ipn_sky_files(workflow, tags=None):
'''
    Retrieve the sky point files for searching over the IPN error box and
populating it with injections.
Expand All @@ -219,6 +219,8 @@ def get_ipn_sky_files(workflow, tags=[]):
ipn_files : pycbc.workflow.core.FileList
FileList holding the details of the IPN sky point files.
'''
if tags is None:
tags = []
cp = workflow.cp
ipn_search_points = cp.get("workflow-inspiral", "ipn-search-points")
ipn_search_points = resolve_url(ipn_search_points)
Expand Down
4 changes: 3 additions & 1 deletion pycbc/workflow/injection.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def cut_distant_injections(workflow, inj_file, out_dir, tags=None):

def setup_injection_workflow(workflow, output_dir=None,
inj_section_name='injections', exttrig_file=None,
tags =[]):
tags =None):
"""
This function is the gateway for setting up injection-generation jobs in a
workflow. It should be possible for this function to support a number
Expand Down Expand Up @@ -109,6 +109,8 @@ def setup_injection_workflow(workflow, output_dir=None,
identify them. The FileList class contains functions to search
based on tags.
"""
if tags is None:
tags = []
logging.info("Entering injection module.")
make_analysis_dir(output_dir)

Expand Down
Loading