OPSIM-1155: Update DDF metrics / metric_subsets #411

Merged
merged 21 commits on Jun 17, 2024

Changes from all commits (21 commits)
f1a5b6b
Change the name 'metric_set' to 'metric_subsets'
rhiannonlynne May 17, 2024
5778c48
Add convenience functions to make a wfd and circular healpix subset s…
rhiannonlynne May 20, 2024
7f746cb
Remove unused dictionary/print functions
rhiannonlynne May 20, 2024
392acf5
Line length and doc string format.
rhiannonlynne May 20, 2024
136d12b
Export 'coadd_m5' and only print (common) warning with verbose flag
rhiannonlynne May 20, 2024
8479bda
Only print (common) warnings with verbose flag
rhiannonlynne May 20, 2024
ba0b791
Let unislicer carry "slicepoint" information if needed.
rhiannonlynne May 20, 2024
6b56cf3
Change how "filled" flag works
rhiannonlynne May 20, 2024
265f9d8
Add coadd-per-night metric (intended for DDFs primarily)
rhiannonlynne May 20, 2024
f8dcf88
Line length and doc string format.
rhiannonlynne May 20, 2024
c70b18a
Sort of a hack to allow the "Identity" metric automatically applied t…
rhiannonlynne May 20, 2024
4158197
Update the DDF metric batch.
rhiannonlynne May 20, 2024
595310d
Warning if no pyoorb
rhiannonlynne Jun 11, 2024
1aae39b
Black/isort/ruff
rhiannonlynne May 21, 2024
a5dc99c
Update data download versions
rhiannonlynne Jun 14, 2024
85075ea
Wider tolerance for skybrightness: intel vs. m2
rhiannonlynne Jun 14, 2024
052947b
Black for ooephemerides again
rhiannonlynne Jun 14, 2024
7a22073
Update to newer test datafile, without skybrightness pre files
rhiannonlynne Jun 17, 2024
2979237
Update tests to use new example database
rhiannonlynne Jun 17, 2024
ed8f382
Remove obsolete dither code and re-enable remaining dither options
rhiannonlynne Jun 17, 2024
c9831a7
black and isort
rhiannonlynne Jun 17, 2024
6 changes: 3 additions & 3 deletions rubin_sim/data/rs_download_data.py
@@ -52,15 +52,15 @@ def data_dict():
# to create tar files and follow any sym links, run: e.g.
# ``tar -chvzf maf_may_2021.tgz maf``
file_dict = {
"maf": "maf_2022_08_26.tgz",
"maf": "maf_2024_06_13.tgz",
"maps": "maps_2022_2_28.tgz",
"movingObjects": "movingObjects_oct_2021.tgz",
"orbits": "orbits_2022_3_1.tgz",
"orbits_precompute": "orbits_precompute_2023_05_23.tgz",
"sim_baseline": "sim_baseline_2023_09_22.tgz",
"sim_baseline": "sim_baseline_2024_06_13.tgz",
"skybrightness": "skybrightness_2023_09_11.tgz",
"throughputs": "throughputs_2023_09_22.tgz",
"tests": "tests_2022_10_18.tgz",
"tests": "tests_2024_04_23.tgz",
}
return file_dict

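For orientation, a minimal sketch of checking which archive versions the updated dictionary points to (assuming only that `data_dict` is importable from `rubin_sim/data/rs_download_data.py` as shown above; the real script's download handling is not reproduced):

```python
# Minimal sketch: list each data package and the tar file version it resolves to.
# Assumes data_dict is importable from the module shown in this diff.
from rubin_sim.data.rs_download_data import data_dict

for package, tarfile in data_dict().items():
    print(f"{package:20s} -> {tarfile}")
```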
102 changes: 76 additions & 26 deletions rubin_sim/maf/batches/ddf_batch.py
@@ -232,23 +232,8 @@ def ddfBatch(
)

# Weak lensing visits
# The "magic numbers" here scale the final depth into
# approximately consistent visits per year - final depth is
# determined by arbitrary definition of 'good sample'
lim_ebv = 0.2
offset = 0.1
mag_cuts = {
1: 24.75 - offset,
2: 25.12 - offset,
3: 25.35 - offset,
4: 25.5 - offset,
5: 25.62 - offset,
6: 25.72 - offset,
7: 25.8 - offset,
8: 25.87 - offset,
9: 25.94 - offset,
10: 26.0 - offset,
}
mag_cuts = 26.0
displayDict["group"] = "Weak Lensing"
displayDict["subgroup"] = ""
displayDict["caption"] = f"Weak lensing metric in the {fieldname} DDF."
@@ -261,7 +246,7 @@ def ddfBatch(
for sql in sqls_gri:
metric = maf.WeakLensingNvisits(
lsst_filter="i",
depth_cut=mag_cuts[10],
depth_cut=mag_cuts,
ebvlim=lim_ebv,
min_exp_time=20.0,
metric_name="WeakLensingNvisits_" + sql,
@@ -389,7 +374,7 @@ def ddfBatch(
display_dict=displayDict,
)
)

#######
# Coadded depth per filter, and count per filter
displayDict["group"] = "Basics"
for f in "ugrizy":
@@ -464,11 +449,22 @@ def ddfBatch(
)
)

# Now to compute some things at just the center of the DDF
# Now to compute some things ~~at just the center of the DDF~~ NOPE
# (will compute these "per DDF" not just at the center, since
# the dithering pattern is not yet set and that will influence the
# result -- once dithering is better determined, could add ptslicer).
# For these metrics, add a requirement that the 'note' label
# match the DDF, to avoid WFD visits skewing the results
# (we want to exclude non-DD visits),
ptslicer = maf.UserPointsSlicer(np.mean(ddfs[ddf]["ra"]), np.mean(ddfs[ddf]["dec"]))

if fieldname == "WFD":
ptslicer = maf.UserPointsSlicer(np.mean(ddfs[ddf]["ra"]), np.mean(ddfs[ddf]["dec"]))
else:
ptslicer = maf.UniSlicer() # rely on query to remove non-DD visits
# Add RA and Dec to slice_point data (for season calculations)
# slice_points store ra/dec internally in radians.
ptslicer.slice_points["ra"] = np.radians(np.mean(ddfs[ddf]["ra"]))
ptslicer.slice_points["dec"] = np.radians(np.mean(ddfs[ddf]["dec"]))

displayDict["group"] = "Cadence"
displayDict["order"] = order
@@ -488,17 +484,15 @@ def ddfBatch(
displayDict["subgroup"] = "Sequence length"

# Number of observations per night, any filter (sequence length)
# Histogram the number of visits per night at the center of the DDF
# Histogram the number of visits per night
countbins = np.arange(0, 200, 5)
metric = maf.NVisitsPerNightMetric(
night_col="night",
bins=countbins,
metric_name=f"{fieldname} NVisitsPerNight",
)
plotDict = {"bins": countbins, "xlabel": "Number of visits per night"}
displayDict["caption"] = (
f"Histogram of the number of visits in each night, at the center of {fieldname}."
)
displayDict["caption"] = "Histogram of the number of visits in each night per DDF."
plotFunc = maf.SummaryHistogram()
bundle = maf.MetricBundle(
metric,
@@ -511,17 +505,47 @@ def ddfBatch(
)
bundle_list.append(bundle)
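A short numpy sketch of the binning the visits-per-night histogram performs, with toy data (the metric itself reads the `night` column from the matched visits):

```python
import numpy as np

nights = np.array([1, 1, 1, 2, 2, 5, 5, 5, 5])  # toy 'night' values, one per visit
countbins = np.arange(0, 200, 5)

# Number of visits obtained on each night that had any visits at all.
_, visits_per_night = np.unique(nights, return_counts=True)  # [3 2 4]

# Histogram of those per-night counts, using the same bins as the metric.
hist, _ = np.histogram(visits_per_night, bins=countbins)
print(hist[:3])  # [3 0 0] -> three nights fell in the 0-4 visits bin
```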

# Coadded depth of observations per night, each filter
# "magic numbers" to fill plot come from baseline v3.4
min_coadds = {"u": 22.3, "g": 22.3, "r": 22.9, "i": 23.1, "z": 21.7, "y": 21.5}
max_coadds = {"u": 26, "g": 27.2, "r": 27, "i": 26.5, "z": 26.5, "y": 25.1}
# Histogram the coadded depth per night, per filter
for f in "ugrizy":
magbins = np.arange(min_coadds[f], max_coadds[f], 0.05)
metric = maf.CoaddM5PerNightMetric(
night_col="night",
m5_col="fiveSigmaDepth",
bins=magbins,
metric_name=f"{fieldname} CoaddM5PerNight",
)
plotDict = {"bins": magbins, "xlabel": "Coadded Depth Per Night"}
displayDict["caption"] = f"Histogram of the coadded depth in {f} in each night per DDF."
plotFunc = maf.SummaryHistogram()
bundle = maf.MetricBundle(
metric,
ptslicer,
fieldsqls[f],
info_label=info_labels[f],
plot_dict=plotDict,
display_dict=displayDict,
plot_funcs=[plotFunc],
)
bundle_list.append(bundle)
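The per-night depth being histogrammed follows the standard 5-sigma coadd relation, m5_coadd = 1.25 log10(sum_i 10^(0.8 m5_i)); a hedged sketch of that formula (not the metric's own code):

```python
import numpy as np

def coadd_m5(m5_values):
    """Standard 5-sigma coadded depth: 1.25 * log10(sum(10 ** (0.8 * m5_i)))."""
    m5_values = np.asarray(m5_values, dtype=float)
    return 1.25 * np.log10(np.sum(10.0 ** (0.8 * m5_values)))

# Two equal-depth visits gain ~0.38 mag; e.g. a night with four i-band visits:
print(coadd_m5([23.8, 23.9, 23.7, 23.8]))  # ~24.56
```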

# Plot of number of visits per night over time
if fieldname.endswith("WFD"):
pass
else:
displayDict["caption"] = f"Number of visits per night for {fieldname}."
metric = maf.CountMetric("observationStartMJD", metric_name=f"{fieldname} Nvisits Per Night")
slicer = maf.OneDSlicer(slice_col_name="night", bin_size=1)
slicer = maf.OneDSlicer(slice_col_name="night", bin_size=1, badval=0)
plot_dict = {"filled_data": True}
bundle = maf.MetricBundle(
metric,
slicer,
fieldsqls["all"],
info_label=info_labels["all"],
plot_dict=plot_dict,
display_dict=displayDict,
summary_metrics=[
maf.MedianMetric(),
@@ -532,6 +556,31 @@ def ddfBatch(
)
bundle_list.append(bundle)
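A hedged numpy sketch of what `badval=0` together with the `filled_data` plot option achieves here: nights with no visits appear as zero in the per-night plot instead of being masked (toy numbers, not the slicer's code):

```python
import numpy as np

nights_with_visits = np.array([1, 2, 5])  # nights that actually had visits
counts = np.array([12, 9, 20])            # Nvisits on those nights
badval = 0

all_nights = np.arange(1, nights_with_visits.max() + 1)
filled = np.full(all_nights.size, badval)
filled[np.searchsorted(all_nights, nights_with_visits)] = counts
print(filled)  # [12  9  0  0 20] -- gap nights show badval rather than being masked
```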

# Likewise, but coadded depth per filter
if fieldname.endswith("WFD"):
pass
else:
for f in "ugrizy":
displayDict["caption"] = f"Coadded depth per night for {fieldname} in band {f}."
metric = maf.Coaddm5Metric(metric_name=f"{fieldname} CoaddedM5 Per Night")
slicer = maf.OneDSlicer(slice_col_name="night", bin_size=1, badval=min_coadds[f])
plot_dict = {"filled_data": True}
bundle = maf.MetricBundle(
metric,
slicer,
fieldsqls[f],
info_label=info_labels[f],
plot_dict=plot_dict,
display_dict=displayDict,
summary_metrics=[
maf.MedianMetric(),
maf.PercentileMetric(percentile=80, metric_name="80thPercentile"),
maf.MinMetric(),
maf.MaxMetric(),
],
)
bundle_list.append(bundle)

displayDict["subgroup"] = "Sequence gaps"

# Histogram of the number of nights between visits, all filters
@@ -582,12 +631,13 @@ def rfunc(simdata):
# Sometimes number of seasons is 10, sometimes 11
# (depending on where survey starts/end)
# so normalize it so there's always 11 values
# by adding 0 at the end.
if len(simdata) < 11:
simdata = np.concatenate([simdata, np.array([0], float)])
return simdata

metric = maf.SeasonLengthMetric(reduce_func=rfunc, metric_dtype="object")
plotDict = {"bins": np.arange(0, 12), "xlabel": "Season length (days)"}
plotDict = {"bins": np.arange(0, 12), "ylabel": "Season length (days)", "xlabel": "Season"}
plotFunc = maf.SummaryHistogram()
displayDict["caption"] = f"Plot of the season length per season in the {fieldname} DDF."
displayDict["order"] = order
2 changes: 1 addition & 1 deletion rubin_sim/maf/db/results_db.py
@@ -574,7 +574,7 @@ def update_summary_stat(self, metric_id, summary_name, summary_value, ntry=3, pa
# np.ndarray with 'name' and 'value' columns.
self.open()
tries = 0
if isinstance(summary_value, np.ndarray):
if isinstance(summary_value, np.ndarray) and summary_value.dtype.names is not None:
if ("name" in summary_value.dtype.names) and ("value" in summary_value.dtype.names):
for value in summary_value:
sSuffix = value["name"]
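The added `dtype.names is not None` guard separates structured summary values (with `name` and `value` fields) from plain arrays, for which the membership test would otherwise raise a TypeError; a hedged sketch of the two cases (not the ResultsDb code):

```python
import numpy as np

# Structured summary result: dtype.names is populated, so each row can be
# stored under its own name suffix.
structured = np.array(
    [("80thPercentile", 22.5), ("Median", 21.9)],
    dtype=[("name", "U20"), ("value", float)],
)
print(structured.dtype.names)  # ('name', 'value')

# Plain ndarray summary result: dtype.names is None, so the new guard skips
# the 'name' / 'value' lookup instead of erroring.
plain = np.array([22.5, 21.9])
print(plain.dtype.names)  # None
```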
18 changes: 4 additions & 14 deletions rubin_sim/maf/metadata_dir.py
@@ -6,15 +6,13 @@
import shutil

import matplotlib
import numpy as np

matplotlib.use("Agg")

from . import batches as batches
from .db import ResultsDb
from .metric_bundles import MetricBundle, MetricBundleGroup
from .metrics import CountExplimMetric
from .slicers import HealpixSlicer, HealpixSubsetSlicer
from .metric_bundles import MetricBundleGroup
from .slicers import HealpixSlicer, make_wfd_subset_slicer


def metadata_dir():
@@ -77,17 +75,9 @@ def metadata_dir():
if os.path.isdir(out_dir):
shutil.rmtree(out_dir)

# Find the 'wfd' footprint
m = CountExplimMetric(col="observationStartMJD")
# Find the 'wfd' footprint - use the scheduler footprint.
allsky_slicer = HealpixSlicer(nside=args.nside)
constraint = 'note not like "%DD%"'
bundle = MetricBundle(m, allsky_slicer, constraint, run_name=sim_name)
g = MetricBundleGroup({f"{sim_name} footprint": bundle}, filename, out_dir=out_dir)
g.run_all()
wfd_footprint = bundle.metric_values.filled(0)
wfd_footprint = np.where(wfd_footprint > args.wfd_threshold, 1, 0)
wfd_hpix = np.where(wfd_footprint == 1)[0]
wfd_slicer = HealpixSubsetSlicer(nside=args.nside, hpid=wfd_hpix)
wfd_slicer = make_wfd_subset_slicer(nside=args.nside)

bdict = batches.info_bundle_dicts(allsky_slicer, wfd_slicer, sim_name, colmap)

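For reference, the removed block built the WFD footprint by thresholding a visit-count map; below is a hedged sketch of that old pattern next to the new one-line convenience call (the nside, threshold, and random count map are illustrative stand-ins, not values from the script):

```python
import numpy as np
from rubin_sim.maf.slicers import HealpixSubsetSlicer, make_wfd_subset_slicer

nside = 64              # illustrative; metadata_dir takes this from args.nside
wfd_threshold = 750     # illustrative, analogous to args.wfd_threshold

# Old pattern (sketch): pick healpixels whose visit count exceeds a threshold.
count_map = np.random.poisson(800, 12 * nside**2)  # stand-in for the metric values
wfd_hpix = np.where(count_map > wfd_threshold)[0]
old_style_slicer = HealpixSubsetSlicer(nside=nside, hpid=wfd_hpix)

# New pattern: delegate the footprint selection to the convenience function.
wfd_slicer = make_wfd_subset_slicer(nside=nside)
```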