[pre-commit.ci] pre-commit autoupdate #1295

Closed
wants to merge 2 commits
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -10,7 +10,7 @@ repos:
       - id: check-added-large-files

   - repo: https://github.com/psf/black
-    rev: 23.11.0
+    rev: 23.12.0
     hooks:
       - id: black
         args: [--safe, --line-length=100, --preview]
@@ -24,7 +24,7 @@ repos:
       - id: docformatter

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.7.0
+    rev: v1.7.1
     hooks:
       - id: mypy
         additional_dependencies: [
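This PR was opened by pre-commit.ci, which runs the equivalent of pre-commit autoupdate and commits whatever the refreshed hooks then reformat. Every Python hunk below is formatting-only churn: the config pins black with --preview, and the preview style changed between 23.11.0 and 23.12.0 in how a call wrapping a single bracketed collection is laid out, moving brackets in both directions depending on the call site. A minimal, illustrative sketch of the two layouts (not code from the repo):

# Layout A: the collection "hugs" the call's parentheses.
gain_estimates = sorted([
    abs(value) for value in (-3, 1, -2)
])

# Layout B: the collection sits on its own indented line.
gain_estimates = sorted(
    [abs(value) for value in (-3, 1, -2)]
)

# Both layouts parse to identical code; the hunks below only move brackets.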
6 changes: 3 additions & 3 deletions bin/restrax
@@ -315,9 +315,9 @@ class ReStrax(daq_core.DataBases):
             self.log.info("No new work, looking for previously failed runs")
             # Look for work which we tried before (and has a restrax field)
             query.pop("restrax")
-            query.update({
-                "restrax.n_tries": {"$lt": self.max_tries + 1}, "restrax.state": {"$ne": "done"}
-            })
+            query.update(
+                {"restrax.n_tries": {"$lt": self.max_tries + 1}, "restrax.state": {"$ne": "done"}}
+            )
             run_doc = self.run_coll.find_one(query, **kw)

         if run_doc is not None:
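For context on the hunk above: the query selects runs that were attempted before but never finished. A sketch of that retry selection under assumptions, using pymongo directly with hypothetical database and collection names (the real logic lives in bin/restrax):

from pymongo import MongoClient

coll = MongoClient()["daq"]["runs"]  # hypothetical names, not from this PR
max_tries = 3

run_doc = coll.find_one({
    "restrax.n_tries": {"$lt": max_tries + 1},  # not yet out of retries
    "restrax.state": {"$ne": "done"},           # and not already finished
})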
6 changes: 3 additions & 3 deletions notebooks/tutorials/pulse_analysis.ipynb
@@ -767,9 +767,9 @@
 "areas = rr[\"data\"][:, 40:70].sum(axis=1)\n",
 "channels = rr[\"channel\"]\n",
 "\n",
-"gain_ests = np.array([\n",
-"    np.median(areas[channels == pmt]) for pmt in tqdm(np.arange(straxen.n_tpc_pmts))\n",
-"])"
+"gain_ests = np.array(\n",
+"    [np.median(areas[channels == pmt]) for pmt in tqdm(np.arange(straxen.n_tpc_pmts))]\n",
+")"
 ]
 },
 {
6 changes: 3 additions & 3 deletions straxen/analyses/event_display.py
@@ -244,9 +244,9 @@ def _event_display(
         _scatter_rec(event)

     # Fill panels with peak/event info
-    for it, (ax, labels_and_unit) in enumerate([
-        (ax_event_info, display_event_info), (ax_peak_info, display_peak_info)
-    ]):
+    for it, (ax, labels_and_unit) in enumerate(
+        [(ax_event_info, display_event_info), (ax_peak_info, display_peak_info)]
+    ):
         if ax is not None:
             for i, (_lab, _unit) in enumerate(labels_and_unit):
                 coord = 0.01, 0.9 - 0.9 * i / len(labels_and_unit)
6 changes: 3 additions & 3 deletions straxen/analyses/posrec_comparison.py
@@ -71,9 +71,9 @@ def load_corrected_positions(
     itp_tmp = straxen.InterpolatingMap(straxen.common.get_resource(map_tmp, fmt="binary"))
     itp_tmp.scale_coordinates([1.0, 1.0, -drift_speed])

-    orig_pos = np.vstack([
-        events[f"{s2_pre}s2_x_{algo}"], events[f"{s2_pre}s2_y_{algo}"], z_obs
-    ]).T
+    orig_pos = np.vstack(
+        [events[f"{s2_pre}s2_x_{algo}"], events[f"{s2_pre}s2_y_{algo}"], z_obs]
+    ).T
     r_obs = np.linalg.norm(orig_pos[:, :2], axis=1)
     delta_r = itp_tmp(orig_pos)
     z_obs = z_obs + drift_speed * drift_time_gate
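The reformatted lines build an (n_events, 3) array of observed positions and take the radial distance in the xy-plane. A self-contained numpy illustration with synthetic inputs (the real arrays come from the events data):

import numpy as np

s2_x = np.array([1.0, -3.0])     # synthetic stand-ins for events["...s2_x_..."]
s2_y = np.array([2.0, 4.0])
z_obs = np.array([-10.0, -20.0])

orig_pos = np.vstack([s2_x, s2_y, z_obs]).T       # shape (2, 3): rows are (x, y, z)
r_obs = np.linalg.norm(orig_pos[:, :2], axis=1)   # radius computed from x and y only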
14 changes: 8 additions & 6 deletions straxen/contexts.py
@@ -291,12 +291,14 @@ def xenonnt_online(
     # newer than 8796 are not affected. See:
     # https://github.com/XENONnT/straxen/pull/166 and
     # https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:xenonnt:dsg:daq:sector_swap
-    st.set_context_config({
-        "apply_data_function": (
-            straxen.remap_old,
-            straxen.check_loading_allowed,
-        )
-    })
+    st.set_context_config(
+        {
+            "apply_data_function": (
+                straxen.remap_old,
+                straxen.check_loading_allowed,
+            )
+        }
+    )
     if _context_config_overwrite is not None:
         warnings.warn(
             f"_context_config_overwrite is deprecated, please pass to context as kwargs",
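The hunk above only re-wraps the set_context_config call; the interesting part is the apply_data_function hook it configures. A minimal sketch, assuming strax invokes each hook on loaded data and chains the return values (the exact call signature is a strax internal; verify it against your strax version):

import strax

def passthrough_hook(data, run_id, targets):
    # Assumed hook signature. straxen.remap_old works similarly: it takes the
    # loaded array and returns it with PMT channels swapped for affected runs.
    return data

st = strax.Context()
st.set_context_config({"apply_data_function": (passthrough_hook,)})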
12 changes: 5 additions & 7 deletions straxen/holoviews_utils.py
@@ -436,13 +436,11 @@ def _make_title(self, ind):
         start_ns = start - (start // 10**9) * 10**9
         end = self.df_event_time.loc[ind, "endtime"]
         end_ns = end - start + start_ns
-        return "".join(
-            (
-                f"##Event {ind} from run {self.run_id}\n",
-                f"##Recorded at ({date[:10]} {date[10:]}) UTC ",
-                f"{start_ns} ns - {end_ns} ns",
-            )
-        )
+        return "".join((
+            f"##Event {ind} from run {self.run_id}\n",
+            f"##Recorded at ({date[:10]} {date[10:]}) UTC ",
+            f"{start_ns} ns - {end_ns} ns",
+        ))


 def plot_tpc_circle(radius):
6 changes: 3 additions & 3 deletions straxen/mini_analysis.py
@@ -135,9 +135,9 @@ def wrapped_f(context: strax.Context, run_id: str, **kwargs):
         if kwargs.get("time_range") is None:
             scr = None
         else:
-            scr = tuple([
-                (t - kwargs["t_reference"]) / int(1e9) for t in kwargs["time_range"]
-            ])
+            scr = tuple(
+                [(t - kwargs["t_reference"]) / int(1e9) for t in kwargs["time_range"]]
+            )
         kwargs.setdefault("seconds_range", scr)

         kwargs.setdefault("run_id", run_id)
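The reformatted expression converts absolute nanosecond timestamps into seconds relative to a reference time. The same arithmetic in isolation, with illustrative values:

t_reference = 1_600_000_000_000_000_000  # ns since the Unix epoch
time_range = (t_reference + 5 * 10**9, t_reference + 65 * 10**9)

seconds_range = tuple((t - t_reference) / int(1e9) for t in time_range)
assert seconds_range == (5.0, 65.0)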
6 changes: 3 additions & 3 deletions straxen/plugins/aqmon_hits/aqmon_hits.py
@@ -136,9 +136,9 @@ def find_aqmon_hits_per_channel(self, records):
         aqmon_hits = strax.find_hits(records[~is_artificial], min_amplitude=aqmon_thresholds)

         if np.sum(is_artificial):
-            aqmon_hits = np.concatenate([
-                aqmon_hits, self.get_deadtime_hits(records[is_artificial])
-            ])
+            aqmon_hits = np.concatenate(
+                [aqmon_hits, self.get_deadtime_hits(records[is_artificial])]
+            )
         return aqmon_hits

     def get_deadtime_hits(self, artificial_deadtime):
20 changes: 8 additions & 12 deletions straxen/plugins/events/event_ambience.py
@@ -24,18 +24,14 @@ def origin_dtype(self):
     def infer_dtype(self):
         dtype = []
         for ambience in self.origin_dtype:
-            dtype.append(
-                (
-                    (f"Number of {' '.join(ambience.split('_'))} main S1", f"s1_n_{ambience}"),
-                    np.int16,
-                )
-            )
-            dtype.append(
-                (
-                    (f"Number of {' '.join(ambience.split('_'))} main S2", f"s2_n_{ambience}"),
-                    np.int16,
-                )
-            )
+            dtype.append((
+                (f"Number of {' '.join(ambience.split('_'))} main S1", f"s1_n_{ambience}"),
+                np.int16,
+            ))
+            dtype.append((
+                (f"Number of {' '.join(ambience.split('_'))} main S2", f"s2_n_{ambience}"),
+                np.int16,
+            ))
         dtype += strax.time_fields
         return dtype
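The ((description, name), type) tuples appended above, and in the dtype hunks that follow, use numpy's optional (title, name) field spec: the title carries a human-readable description while the name addresses the field. A small self-contained example with an illustrative field, not the plugin's exact dtype:

import numpy as np

dtype = np.dtype([
    (("Number of lone hits near the main S1", "s1_n_lone_hits"), np.int16),
])
arr = np.zeros(1, dtype=dtype)
arr["s1_n_lone_hits"][0] = 7  # fields are addressed by name
# dtype.fields["s1_n_lone_hits"] -> (dtype('int16'), 0, 'Number of lone hits near the main S1')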
30 changes: 12 additions & 18 deletions straxen/plugins/events/event_area_per_channel.py
@@ -37,24 +37,18 @@ def infer_dtype(self):
         # populating APC
         ptypes = ["s1", "s2", "alt_s1", "alt_s2"]
         for type_ in ptypes:
-            dtype += [
-                (
-                    (f"Area per channel for {infoline[type_]}", f"{type_}_area_per_channel"),
-                    pfields_["area_per_channel"][0],
-                )
-            ]
-            dtype += [
-                (
-                    (f"Length of the interval in samples for {infoline[type_]}", f"{type_}_length"),
-                    pfields_["length"][0],
-                )
-            ]
-            dtype += [
-                (
-                    (f"Width of one sample for {infoline[type_]} [ns]", f"{type_}_dt"),
-                    pfields_["dt"][0],
-                )
-            ]
+            dtype += [(
+                (f"Area per channel for {infoline[type_]}", f"{type_}_area_per_channel"),
+                pfields_["area_per_channel"][0],
+            )]
+            dtype += [(
+                (f"Length of the interval in samples for {infoline[type_]}", f"{type_}_length"),
+                pfields_["length"][0],
+            )]
+            dtype += [(
+                (f"Width of one sample for {infoline[type_]} [ns]", f"{type_}_dt"),
+                pfields_["dt"][0],
+            )]
         # populating S1 n channel properties
         n_channel_dtype = [
             (("Main S1 count of contributing PMTs", "s1_n_channels"), np.int16),
67 changes: 27 additions & 40 deletions straxen/plugins/events/event_positions.py
@@ -87,50 +87,37 @@ def infer_dtype(self):
         naive_pos = []
         fdc_pos = []
         for j in "r z".split():
-            naive_pos += [
-                (
-                    f"{j}_naive",
-                    np.float32,
-                    f"Main interaction {j}-position with observed position (cm)",
-                )
-            ]
-            fdc_pos += [
-                (
-                    f"{j}_field_distortion_correction",
-                    np.float32,
-                    f"Correction added to {j}_naive for field distortion (cm)",
-                )
-            ]
+            naive_pos += [(
+                f"{j}_naive",
+                np.float32,
+                f"Main interaction {j}-position with observed position (cm)",
+            )]
+            fdc_pos += [(
+                f"{j}_field_distortion_correction",
+                np.float32,
+                f"Correction added to {j}_naive for field distortion (cm)",
+            )]
             for s_i in [1, 2]:
-                naive_pos += [
-                    (
-                        f"alt_s{s_i}_{j}_naive",
-                        np.float32,
-                        (
-                            f"Alternative S{s_i} interaction (rel. main S{3 - s_i}) {j}-position"
-                            " with observed position (cm)"
-                        ),
-                    )
-                ]
-                fdc_pos += [
-                    (
-                        f"alt_s{s_i}_{j}_field_distortion_correction",
-                        np.float32,
-                        f"Correction added to alt_s{s_i}_{j}_naive for field distortion (cm)",
-                    )
-                ]
+                naive_pos += [(
+                    f"alt_s{s_i}_{j}_naive",
+                    np.float32,
+                    (
+                        f"Alternative S{s_i} interaction (rel. main S{3 - s_i}) {j}-position"
+                        " with observed position (cm)"
+                    ),
+                )]
+                fdc_pos += [(
+                    f"alt_s{s_i}_{j}_field_distortion_correction",
+                    np.float32,
+                    f"Correction added to alt_s{s_i}_{j}_naive for field distortion (cm)",
+                )]
         dtype += naive_pos + fdc_pos
         for s_i in [1, 2]:
-            dtype += [
-                (
-                    f"alt_s{s_i}_theta",
-                    np.float32,
-                    (
-                        f"Alternative S{s_i} (rel. main S{3 - s_i}) interaction angular position"
-                        " (radians)"
-                    ),
-                )
-            ]
+            dtype += [(
+                f"alt_s{s_i}_theta",
+                np.float32,
+                f"Alternative S{s_i} (rel. main S{3 - s_i}) interaction angular position (radians)",
+            )]

         dtype += [("theta", np.float32, f"Main interaction angular position (radians)")]
         return dtype + strax.time_fields
94 changes: 41 additions & 53 deletions straxen/plugins/events/event_shadow.py
@@ -25,80 +25,68 @@ def infer_dtype(self):
             # previous S2 can cast both time & position shadow
             for key in ["s1_time_shadow", "s2_time_shadow", "s2_position_shadow"]:
                 type_str, tp_desc, _ = key.split("_")
-                dtype.append(
-                    (
-                        (
-                            (
-                                f"largest {tp_desc} shadow casting from previous {type_str} to"
-                                f" {main_peak_desc} [PE/ns]"
-                            ),
-                            f"{main_peak}shadow_{key}",
-                        ),
-                        np.float32,
-                    )
-                )
-                dtype.append(
-                    (
-                        (
-                            (
-                                f"time difference from the previous {type_str} casting largest"
-                                f" {tp_desc} shadow to {main_peak_desc} [ns]"
-                            ),
-                            f"{main_peak}dt_{key}",
-                        ),
-                        np.int64,
-                    )
-                )
+                dtype.append((
+                    (
+                        (
+                            f"largest {tp_desc} shadow casting from previous {type_str} to"
+                            f" {main_peak_desc} [PE/ns]"
+                        ),
+                        f"{main_peak}shadow_{key}",
+                    ),
+                    np.float32,
+                ))
+                dtype.append((
+                    (
+                        (
+                            f"time difference from the previous {type_str} casting largest"
+                            f" {tp_desc} shadow to {main_peak_desc} [ns]"
+                        ),
+                        f"{main_peak}dt_{key}",
+                    ),
+                    np.int64,
+                ))
                 # Only previous S2 peaks have (x,y)
                 if "s2" in key:
-                    dtype.append(
-                        (
-                            (
-                                (
-                                    f"x of previous s2 peak casting largest {tp_desc} shadow on"
-                                    f" {main_peak_desc} [cm]"
-                                ),
-                                f"{main_peak}x_{key}",
-                            ),
-                            np.float32,
-                        )
-                    )
-                    dtype.append(
-                        (
-                            (
-                                (
-                                    f"y of previous s2 peak casting largest {tp_desc} shadow on"
-                                    f" {main_peak_desc} [cm]"
-                                ),
-                                f"{main_peak}y_{key}",
-                            ),
-                            np.float32,
-                        )
-                    )
+                    dtype.append((
+                        (
+                            (
+                                f"x of previous s2 peak casting largest {tp_desc} shadow on"
+                                f" {main_peak_desc} [cm]"
+                            ),
+                            f"{main_peak}x_{key}",
+                        ),
+                        np.float32,
+                    ))
+                    dtype.append((
+                        (
+                            (
+                                f"y of previous s2 peak casting largest {tp_desc} shadow on"
+                                f" {main_peak_desc} [cm]"
+                            ),
+                            f"{main_peak}y_{key}",
+                        ),
+                        np.float32,
+                    ))
                 # Only time shadow gives the nearest large peak
                 if "time" in key:
-                    dtype.append(
-                        (
-                            (
-                                (
-                                    f"time difference from the nearest previous large {type_str} to"
-                                    f" {main_peak_desc} [ns]"
-                                ),
-                                f"{main_peak}nearest_dt_{type_str}",
-                            ),
-                            np.int64,
-                        )
-                    )
+                    dtype.append((
+                        (
+                            (
+                                f"time difference from the nearest previous large {type_str} to"
+                                f" {main_peak_desc} [ns]"
+                            ),
+                            f"{main_peak}nearest_dt_{type_str}",
+                        ),
+                        np.int64,
+                    ))
             # Also record the PDF of HalfCauchy when calculating S2 position shadow
-            dtype.append(
-                (
-                    (
-                        f"PDF describing correlation between previous s2 and {main_peak_desc}",
-                        f"{main_peak}pdf_s2_position_shadow",
-                    ),
-                    np.float32,
-                )
-            )
+            dtype.append((
+                (
+                    f"PDF describing correlation between previous s2 and {main_peak_desc}",
+                    f"{main_peak}pdf_s2_position_shadow",
+                ),
+                np.float32,
+            ))
         dtype += strax.time_fields
         return dtype
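The last field above stores the value of a half-Cauchy PDF used as a position-correlation score. A sketch of evaluating such a PDF with scipy, with an illustrative scale (the scale straxen actually uses is plugin configuration, not reproduced here):

from scipy.stats import halfcauchy

distance = 5.0  # illustrative peak-to-peak separation measure
score = halfcauchy.pdf(distance, scale=10.0)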