Merge branch 'gwastro:master' into Executable_doc
pannarale authored Dec 18, 2024
2 parents 3f9a856 + 242620a commit 603eb14
Showing 27 changed files with 2,160 additions and 730 deletions.
12 changes: 6 additions & 6 deletions bin/all_sky_search/pycbc_add_statmap
@@ -308,14 +308,14 @@ if injection_style:
for bg_fname in args.background_files:
bg_f = pycbc.io.HFile(bg_fname, 'r')
ifo_combo_key = bg_f.attrs['ifos'].replace(' ','')
_, far[ifo_combo_key] = significance.get_far(
_, far[ifo_combo_key], _ = significance.get_far(
bg_f['background/stat'][:],
f['foreground/stat'][:],
bg_f['background/decimation_factor'][:],
bg_f.attrs['background_time'],
**significance_dict[ifo_combo_key])

_, far_exc[ifo_combo_key] = \
_, far_exc[ifo_combo_key], _ = \
significance.get_far(
bg_f['background_exc/stat'][:],
f['foreground/stat'][:],
@@ -328,15 +328,15 @@ else:
# background included
for f_in in files:
ifo_combo_key = get_ifo_string(f_in).replace(' ','')
_, far[ifo_combo_key] = \
_, far[ifo_combo_key], _ = \
significance.get_far(
f_in['background/stat'][:],
f['foreground/stat'][:],
f_in['background/decimation_factor'][:],
f_in.attrs['background_time'],
**significance_dict[ifo_combo_key])

_, far_exc[ifo_combo_key] = \
_, far_exc[ifo_combo_key], _ = \
significance.get_far(
f_in['background_exc/stat'][:],
f['foreground/stat'][:],
@@ -607,7 +607,7 @@ while True:
fg_time_ct[key] -= args.cluster_window
bg_t_y = conv.sec_to_year(bg_time_ct[key])
fg_t_y = conv.sec_to_year(fg_time_ct[key])
bg_far, fg_far = significance.get_far(
bg_far, fg_far, _ = significance.get_far(
sep_bg_data[key].data['stat'],
sep_fg_data[key].data['stat'],
sep_bg_data[key].data['decimation_factor'],
@@ -631,7 +631,7 @@ while True:

logging.info("Recalculating combined IFARs")
for key in all_ifo_combos:
_, far[key] = significance.get_far(
_, far[key], _ = significance.get_far(
sep_bg_data[key].data['stat'],
combined_fg_data.data['stat'],
sep_bg_data[key].data['decimation_factor'],
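Note on this file's changes: significance.get_far now returns three values instead of two; the new third value is a dictionary of metadata describing how the significance was computed, and callers in pycbc_add_statmap discard it with `_`. Below is a minimal sketch of the updated call pattern, assuming the pycbc.events.significance module these executables import; the statistic values, decimation factors and background time are hypothetical, and the keyword options normally passed via significance_dict are omitted.

import numpy
from pycbc.events import significance

# Hypothetical inputs: background and foreground ranking statistics,
# per-trigger decimation factors, and background livetime in seconds
back_stat = numpy.array([5.0, 6.2, 7.1])
fore_stat = numpy.array([8.4, 6.9])
dec_facs = numpy.ones_like(back_stat)
background_time = 1.0e6

# New three-value return: background FARs, foreground FARs, and a
# metadata dictionary about the significance calculation
bg_far, fg_far, sig_info = significance.get_far(
    back_stat, fore_stat, dec_facs, background_time)

# When the metadata is not needed, it is discarded:
_, far_only, _ = significance.get_far(
    back_stat, fore_stat, dec_facs, background_time)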
14 changes: 10 additions & 4 deletions bin/all_sky_search/pycbc_coinc_statmap
@@ -239,7 +239,7 @@ fore_stat = all_trigs.stat[fore_locs]

# Cumulative array of inclusive background triggers and the number of
# inclusive background triggers louder than each foreground trigger
bg_far, fg_far = significance.get_far(
bg_far, fg_far, sig_info = significance.get_far(
back_stat,
fore_stat,
all_trigs.decimation_factor[back_locs],
@@ -248,7 +248,7 @@ bg_far, fg_far = significance.get_far(

# Cumulative array of exclusive background triggers and the number
# of exclusive background triggers louder than each foreground trigger
bg_far_exc, fg_far_exc = significance.get_far(
bg_far_exc, fg_far_exc, exc_sig_info = significance.get_far(
exc_zero_trigs.stat,
fore_stat,
exc_zero_trigs.decimation_factor,
@@ -286,10 +286,14 @@ if fore_locs.sum() > 0:
fap = 1 - numpy.exp(- coinc_time / ifar)
f['foreground/ifar'] = conv.sec_to_year(ifar)
f['foreground/fap'] = fap
for key, value in sig_info.items():
f['foreground'].attrs[key] = value
ifar_exc = 1. / fg_far_exc
fap_exc = 1 - numpy.exp(- coinc_time_exc / ifar_exc)
f['foreground/ifar_exc'] = conv.sec_to_year(ifar_exc)
f['foreground/fap_exc'] = fap_exc
for key, value in exc_sig_info.items():
f['foreground'].attrs[key + '_exc'] = value
else:
f['foreground/ifar'] = ifar = numpy.array([])
f['foreground/fap'] = numpy.array([])
@@ -427,7 +431,7 @@ while numpy.any(ifar_foreground >= background_time):
logging.info("Calculating FAN from background statistic values")
back_stat = all_trigs.stat[back_locs]
fore_stat = all_trigs.stat[fore_locs]
bg_far, fg_far = significance.get_far(
bg_far, fg_far, sig_info = significance.get_far(
back_stat,
fore_stat,
all_trigs.decimation_factor[back_locs],
@@ -454,7 +458,7 @@ while numpy.any(ifar_foreground >= background_time):
# Exclusive background doesn't change when removing foreground triggers.
# So we don't have to take background ifar, just repopulate ifar_foreground
else :
_, fg_far_exc = significance.get_far(
_, fg_far_exc, _ = significance.get_far(
exc_zero_trigs.stat,
fore_stat,
exc_zero_trigs.decimation_factor,
@@ -481,6 +485,8 @@ while numpy.any(ifar_foreground >= background_time):
fap = 1 - numpy.exp(- coinc_time / ifar)
f['foreground_h%s/ifar' % h_iterations] = conv.sec_to_year(ifar)
f['foreground_h%s/fap' % h_iterations] = fap
for key, value in sig_info.items():
f['foreground_h%s' % h_iterations].attrs[key] = value

# Update ifar and fap for other foreground triggers
for i in range(len(ifar)):
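Note on this file's changes: the new sig_info and exc_sig_info dictionaries returned by significance.get_far are recorded as HDF5 attributes on the foreground group, with an '_exc' suffix marking the exclusive-background values. A short sketch of that attribute-writing pattern using h5py directly; the file name and dictionary contents are hypothetical.

import h5py

# Hypothetical metadata dictionaries returned by significance.get_far
sig_info = {'method': 'n_louder'}
exc_sig_info = {'method': 'n_louder'}

with h5py.File('statmap_example.hdf', 'w') as f:
    f.create_group('foreground')
    for key, value in sig_info.items():
        f['foreground'].attrs[key] = value
    for key, value in exc_sig_info.items():
        f['foreground'].attrs[key + '_exc'] = value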
4 changes: 3 additions & 1 deletion bin/all_sky_search/pycbc_coinc_statmap_inj
@@ -88,7 +88,7 @@ f.attrs['foreground_time'] = coinc_time

if len(zdata) > 0:

_, fg_far_exc = significance.get_far(
_, fg_far_exc, exc_sig_info = significance.get_far(
back_stat,
zdata.stat,
dec_fac,
@@ -105,6 +105,8 @@ if len(zdata) > 0:
fap_exc = 1 - numpy.exp(- coinc_time / ifar_exc)
f['foreground/ifar_exc'] = conv.sec_to_year(ifar_exc)
f['foreground/fap_exc'] = fap_exc
for key, value in exc_sig_info.items():
f['foreground'].attrs[key + '_exc'] = value

else:
f['foreground/ifar_exc'] = numpy.array([])
4 changes: 3 additions & 1 deletion bin/all_sky_search/pycbc_exclude_zerolag
@@ -91,7 +91,7 @@ for k in filtered_trigs.data:
f_out['background_exc/%s' % k] = filtered_trigs.data[k]

logging.info('Recalculating IFARs')
bg_far, fg_far = significance.get_far(
bg_far, fg_far, sig_info = significance.get_far(
filtered_trigs.data['stat'],
f_in['foreground/stat'][:],
filtered_trigs.data['decimation_factor'],
@@ -107,6 +107,8 @@ bg_ifar_exc = 1. / bg_far
logging.info('Writing updated ifars to file')
f_out['foreground/ifar_exc'][:] = conv.sec_to_year(fg_ifar_exc)
f_out['background_exc/ifar'][:] = conv.sec_to_year(bg_ifar_exc)
for key, value in sig_info.items():
f_out['foreground'].attrs[key + '_exc'] = value

fg_time_exc = conv.sec_to_year(f_in.attrs['foreground_time_exc'])
f_out['foreground/fap_exc'][:] = 1 - np.exp(-fg_time_exc / fg_ifar_exc)
80 changes: 44 additions & 36 deletions bin/all_sky_search/pycbc_fit_sngls_over_multiparam
@@ -59,6 +59,7 @@ def smooth_templates(nabove, invalphan, ntotal, template_idx,
-------------------
weights: ndarray
Weighting factor to apply to the templates specified by template_idx
If None, then numpy.average will revert to numpy.mean
Returns
-------
@@ -68,7 +69,6 @@
Third float: the smoothed total count in template value
"""
if weights is None: weights = numpy.ones_like(template_idx)
nabove_t_smoothed = numpy.average(nabove[template_idx], weights=weights)
ntotal_t_smoothed = numpy.average(ntotal[template_idx], weights=weights)
invalphan_mean = numpy.average(invalphan[template_idx], weights=weights)
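Note on the smooth_templates docstring change above: with weights=None, numpy.average reduces to an unweighted numpy.mean, which is exactly what the added docstring line records. A quick self-contained check:

import numpy

vals = numpy.array([1.0, 2.0, 4.0])
# weights=None (the default) and uniform weights both reduce to the mean
assert numpy.isclose(numpy.average(vals), numpy.mean(vals))
assert numpy.isclose(numpy.average(vals, weights=numpy.ones_like(vals)),
                     numpy.mean(vals))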
@@ -90,7 +90,6 @@ def smooth_tophat(nabove, invalphan, ntotal, dists):
ntotal,
idx_within_area)


# This is the default number of triggers required for n_closest smoothing
_default_total_trigs = 500

@@ -119,7 +118,7 @@ def smooth_distance_weighted(nabove, invalphan, ntotal, dists):
Smooth templates weighted according to dists in a unit-width normal
distribution, truncated at three sigma
"""
idx_within_area = numpy.flatnonzero(dists < 3.)
idx_within_area = dists < 3.
weights = norm.pdf(dists[idx_within_area])
return smooth_templates(nabove, invalphan, ntotal,
idx_within_area, weights=weights)
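Note on the smooth_distance_weighted change above: the selection switches from numpy.flatnonzero to a boolean mask, keeping templates within three sigma and weighting them by a unit-width normal PDF of their distance. A standalone sketch of the weighting with hypothetical distances:

import numpy
from scipy.stats import norm

dists = numpy.array([0.1, 0.5, 1.2, 2.9, 3.5])  # hypothetical distances
idx_within_area = dists < 3.                    # boolean mask, as in the new code
weights = norm.pdf(dists[idx_within_area])      # nearer templates weigh more
print(weights / weights.sum())                  # normalized weighting factors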
@@ -172,6 +171,7 @@ def report_percentage(i, length):
if not pc % 10 and pc_last % 10:
logging.info(f"Template {i} out of {length} ({pc:.0f}%)")


parser = argparse.ArgumentParser(usage="",
description="Smooth (regress) the dependence of coefficients describing "
"single-ifo background trigger distributions on a template "
@@ -255,7 +255,7 @@ init_logging(args.verbose)
analysis_time = 0
attr_dict = {}

# These end up as n_files * n_templates arrays
# These end up as n_files * num_templates arrays
tid = numpy.array([], dtype=int)
nabove = numpy.array([], dtype=int)
ntotal = numpy.array([], dtype=int)
@@ -323,7 +323,7 @@ invalphan = invalpha * nabove
analysis_time /= len(args.template_fit_file)

if len(args.template_fit_file) > 1:
# From the n_templates * n_files arrays, average within each template.
# From the num_templates * n_files arrays, average within each template.
# To do this, we average the n_files occurrences which have the same tid

# The linearity of the average means that we can do this in two steps
@@ -404,10 +404,14 @@ for param, slog in zip(args.fit_param, args.log_param):
else:
raise ValueError("invalid log param argument, use 'true', or 'false'")

nabove_smoothed = []
alpha_smoothed = []
ntotal_smoothed = []
rang = numpy.arange(0, len(nabove))
rang = numpy.arange(0, num_templates)

# Preallocate memory for smoothing results
# smoothed_vals is an array containing smoothed template fit values:
# smoothed_vals[:,0] is the number of triggers above the fit threshold
# smoothed_vals[:,1] is the fit coefficient 'alpha'
# smoothed_vals[:,2] is the total number of triggers in the template
smoothed_vals = numpy.zeros((num_templates, 3))

# Handle the one-dimensional case of tophat smoothing separately
# as it is easier to optimize computational performance.
@@ -430,10 +434,10 @@ if len(parvals) == 1 and args.smoothing_method == 'smooth_tophat':
num = right - left

logging.info("Smoothing ...")
nabove_smoothed = (nasum[right] - nasum[left]) / num
smoothed_vals[:,0] = (nasum[right] - nasum[left]) / num
invmean = (invsum[right] - invsum[left]) / num
alpha_smoothed = nabove_smoothed / invmean
ntotal_smoothed = (ntsum[right] - ntsum[left]) / num
smoothed_vals[:,1] = smoothed_vals[:, 0] / invmean
smoothed_vals[:,2] = (ntsum[right] - ntsum[left]) / num
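Note on the tophat block above: the one-dimensional tophat path avoids a per-template Python loop by forming cumulative sums once, so each window average becomes a difference of two running totals divided by the window length. A sketch of the idea, assuming the cumulative arrays (nasum, invsum, ntsum, built outside the hunk shown) are zero-padded at the front:

import numpy

nabove = numpy.array([4., 2., 7., 5., 3.])  # hypothetical per-template counts
left = numpy.array([0, 0, 1, 2, 3])         # hypothetical window boundaries
right = numpy.array([2, 3, 4, 5, 5])

# Zero-padded running total: nasum[r] - nasum[l] sums entries in [l, r)
nasum = numpy.concatenate(([0.], numpy.cumsum(nabove)))
num = right - left
smoothed = (nasum[right] - nasum[left]) / num  # windowed mean per template

Once the window edges are known, this costs one pass over the templates rather than one pass per template.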

elif numpy.isfinite(_smooth_cut[args.smoothing_method]):
c = _smooth_cut[args.smoothing_method]
@@ -453,51 +457,55 @@ elif numpy.isfinite(_smooth_cut[args.smoothing_method]):
parvals[sort_dim] - cut_lengths[sort_dim])
rights = numpy.searchsorted(parvals[sort_dim],
parvals[sort_dim] + cut_lengths[sort_dim])
n_removed = len(parvals[0]) - rights + lefts
n_removed = num_templates - rights + lefts
logging.info("Cutting between %d and %d templates for each smoothing",
n_removed.min(), n_removed.max())

# Sort the values to be smoothed by parameter value
logging.info("Smoothing ...")
slices = [slice(l,r) for l, r in zip(lefts, rights)]
nabove_sort = nabove[par_sort]
invalphan_sort = invalphan[par_sort]
ntotal_sort = ntotal[par_sort]
slices = [slice(l, r) for l, r in zip(lefts, rights)]
for i in rang:
report_percentage(i, rang.max())
report_percentage(i, num_templates)
slc = slices[i]
d = dist(i, slc, parvals, args.smoothing_width)

smoothed_tuple = smooth(nabove[par_sort][slc],
invalphan[par_sort][slc],
ntotal[par_sort][slc],
d,
args.smoothing_method,
**kwarg_dict)
nabove_smoothed.append(smoothed_tuple[0])
alpha_smoothed.append(smoothed_tuple[1])
ntotal_smoothed.append(smoothed_tuple[2])
smoothed_vals[i,:] = smooth(
nabove_sort[slc],
invalphan_sort[slc],
ntotal_sort[slc],
d,
args.smoothing_method,
**kwarg_dict
)

# Undo the sorts
unsort = numpy.argsort(par_sort)
parvals = [p[unsort] for p in parvals]
nabove_smoothed = numpy.array(nabove_smoothed)[unsort]
alpha_smoothed = numpy.array(alpha_smoothed)[unsort]
ntotal_smoothed = numpy.array(ntotal_smoothed)[unsort]
smoothed_vals = smoothed_vals[unsort, :]
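Note on the finite-cut branch above: templates are sorted along the dimension with the longest cut length, numpy.searchsorted then gives each template a contiguous slice of sorted neighbours lying within the cut, and the sort is undone afterwards. A sketch of the window selection with hypothetical, already-sorted parameter values:

import numpy

parvals_sorted = numpy.array([0.3, 1.1, 1.2, 2.0, 3.4])  # hypothetical, sorted
cut_length = 0.5

lefts = numpy.searchsorted(parvals_sorted, parvals_sorted - cut_length)
rights = numpy.searchsorted(parvals_sorted, parvals_sorted + cut_length)
slices = [slice(l, r) for l, r in zip(lefts, rights)]

for i, slc in enumerate(slices):
    # Neighbours considered when smoothing template i
    print(i, parvals_sorted[slc])

Restricting each template to this slice is what the n_removed logging above reports on.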

else:
logging.info("Smoothing ...")
for i in rang:
report_percentage(i, rang.max())
report_percentage(i, num_templates)
d = dist(i, rang, parvals, args.smoothing_width)
smoothed_tuple = smooth(nabove, invalphan, ntotal, d,
args.smoothing_method, **kwarg_dict)
nabove_smoothed.append(smoothed_tuple[0])
alpha_smoothed.append(smoothed_tuple[1])
ntotal_smoothed.append(smoothed_tuple[2])
smoothed_vals[i, :] = smooth(
nabove,
invalphan,
ntotal,
d,
args.smoothing_method,
**kwarg_dict
)

logging.info("Writing output")
outfile = HFile(args.output, 'w')
outfile['template_id'] = tid
outfile['count_above_thresh'] = nabove_smoothed
outfile['fit_coeff'] = alpha_smoothed
outfile['count_in_template'] = ntotal_smoothed
outfile['count_above_thresh'] = smoothed_vals[:, 0]
outfile['fit_coeff'] = smoothed_vals[:, 1]
outfile['count_in_template'] = smoothed_vals[:, 2]
if median_sigma is not None:
outfile['median_sigma'] = median_sigma

(Diffs for the remaining changed files are not expanded in this view.)
