diff --git a/rubin_sim/maf/batches/col_map_dict.py b/rubin_sim/maf/batches/col_map_dict.py index b4674755..8077d7d7 100644 --- a/rubin_sim/maf/batches/col_map_dict.py +++ b/rubin_sim/maf/batches/col_map_dict.py @@ -26,8 +26,6 @@ def col_map_dict(dict_name=None): col_map["seeingGeom"] = "seeingFwhmGeom" col_map["skyBrightness"] = "skyBrightness" col_map["moonDistance"] = "moonDistance" - col_map["fieldId"] = "fieldId" - col_map["proposalId"] = "proposalId" col_map["slewactivities"] = {} col_map["metadataList"] = [ "airmass", @@ -41,7 +39,7 @@ def col_map_dict(dict_name=None): "saturation_mag", ] col_map["metadataAngleList"] = ["rotSkyPos"] - col_map["note"] = "note" + col_map["scheduler_note"] = "scheduler_note" elif dict_name == "opsimv4": col_map = {} diff --git a/rubin_sim/maf/batches/ddf_batch.py b/rubin_sim/maf/batches/ddf_batch.py index 9caa2ee8..3f29de62 100644 --- a/rubin_sim/maf/batches/ddf_batch.py +++ b/rubin_sim/maf/batches/ddf_batch.py @@ -2,7 +2,13 @@ import healpy as hp import numpy as np -from rubin_scheduler.utils import angular_separation, ddf_locations, hpid2_ra_dec, sample_patch_on_sphere +from rubin_scheduler.utils import ( + angular_separation, + ddf_locations, + ddf_locations_pre3_5, + hpid2_ra_dec, + sample_patch_on_sphere, +) import rubin_sim.maf as maf @@ -16,6 +22,7 @@ def ddfBatch( nside_sne=128, extra_sql=None, extra_info_label=None, + old_coords=False, ): """ A set of metrics to evaluate DDF fields. @@ -42,6 +49,8 @@ def ddfBatch( necessary sql constraints for each metric. extra_info_label : `str`, optional Additional description information to add (alongside the extra_sql) + old_coords : `bool` + Use the default locations for the DDFs from pre-July 2024. Default False. 
Returns ------- @@ -53,7 +62,10 @@ def ddfBatch( # Define the slicer to use for each DDF # Get standard DDF locations and reformat information as a dictionary ddfs = {} - ddfs_rough = ddf_locations() + if old_coords: + ddfs_rough = ddf_locations_pre3_5() + else: + ddfs_rough = ddf_locations() for ddf in ddfs_rough: ddfs[ddf] = {"ra": ddfs_rough[ddf][0], "dec": ddfs_rough[ddf][1]} # Combine the Euclid double-field into one - but with two ra/dec values @@ -474,7 +486,7 @@ def ddfBatch( for f in filterlist: fieldsqls[f] = sqls[f] else: - fieldsql = f"note like '%{fieldname}%'" + fieldsql = f"scheduler_note like '%{fieldname}%'" for f in filterlist: if len(sqls[f]) > 0: fieldsqls[f] = fieldsql + " and " + sqls[f] diff --git a/rubin_sim/maf/batches/glance_batch.py b/rubin_sim/maf/batches/glance_batch.py index fa9005ec..44e8b7b9 100644 --- a/rubin_sim/maf/batches/glance_batch.py +++ b/rubin_sim/maf/batches/glance_batch.py @@ -144,7 +144,7 @@ def glanceBatch( bundle_list.append(bundle) # alt az of long gaps - sql = "note = 'long'" + sql = "scheduler_note = 'long'" metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits long") bundle = metric_bundles.MetricBundle( metric, @@ -156,7 +156,7 @@ def glanceBatch( ) bundle_list.append(bundle) - sql = "note like 'blob_long%'" + sql = "scheduler_note like 'blob_long%'" metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits blob long") bundle = metric_bundles.MetricBundle( metric, @@ -168,7 +168,7 @@ def glanceBatch( ) bundle_list.append(bundle) - sql = "note like '%neo%' or note like '%near_sun%'" + sql = "scheduler_note like '%neo%' or scheduler_note like '%near_sun%'" metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits twilight near sun") bundle = metric_bundles.MetricBundle( metric, @@ -182,7 +182,7 @@ def glanceBatch( # alt,az pf ToO - sql = "note like 'ToO%'" + sql = "scheduler_note like 'ToO%'" metric = metrics.CountMetric(colmap["mjd"], metric_name="Nvisits long") bundle = 
metric_bundles.MetricBundle( metric, @@ -277,7 +277,7 @@ def glanceBatch( bundle_list.append(bundle) # Make a cumulative plot of a WFD spot - sql = "note not like '%NEO%' and note not like '%near_sun%'" + sql = "scheduler_note not like '%NEO%' and scheduler_note not like '%near_sun%'" uslicer = slicers.UserPointsSlicer(ra=0, dec=-20) metric = metrics.CumulativeMetric() metricb = metric_bundles.MetricBundle( @@ -362,16 +362,16 @@ def glanceBatch( ) bundle_list.append(bundle) - # stats from the note column - if "note" in colmap.keys(): + # stats from the scheduler_note column + if "scheduler_note" in colmap.keys(): displayDict = {"group": "Basic Stats", "subgroup": "Percent stats"} - metric = metrics.StringCountMetric(col=colmap["note"], percent=True, metric_name="Percents") + metric = metrics.StringCountMetric(col=colmap["scheduler_note"], percent=True, metric_name="Percents") sql = "" slicer = slicers.UniSlicer() bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict) bundle_list.append(bundle) displayDict["subgroup"] = "Count Stats" - metric = metrics.StringCountMetric(col=colmap["note"], metric_name="Counts") + metric = metrics.StringCountMetric(col=colmap["scheduler_note"], metric_name="Counts") bundle = metric_bundles.MetricBundle(metric, slicer, sql, display_dict=displayDict) bundle_list.append(bundle) @@ -381,7 +381,7 @@ def glanceBatch( displayDict["subgroup"] = "" for ddf in ddf_surveys: label = ddf.replace("DD:", "") - sql = 'note like "%s%%"' % ("DD:" + label) + sql = 'scheduler_note like "%s%%"' % ("DD:" + label) slicer = slicers.UniSlicer() metric = metrics.CumulativeMetric() metricb = metric_bundles.MetricBundle( @@ -515,7 +515,7 @@ def glanceBatch( lat_lon_deg=colmap["raDecDeg"], ) for filtername in filternames: - sql = "filter='%s' and note like 'ToO%%'" % filtername + sql = "filter='%s' and scheduler_note like 'ToO%%'" % filtername metric = metrics.CountMetric(col=colmap["mjd"], metric_name="N ToO") bundle = 
metric_bundles.MetricBundle( metric, @@ -528,7 +528,9 @@ def glanceBatch( ) bundle_list.append(bundle) - too_sqls = ["note like 'ToO, %" + "t%i'" % hour for hour in [0, 1, 2, 4, 24, 48]] + ["note like 'ToO, %'"] + too_sqls = ["scheduler_note like 'ToO, %" + "t%i'" % hour for hour in [0, 1, 2, 4, 24, 48]] + [ + "scheduler_note like 'ToO, %'" + ] slicer = slicers.UniSlicer() for sql in too_sqls: metric = metrics.CountMetric(col="night") diff --git a/rubin_sim/maf/batches/info_batch.py b/rubin_sim/maf/batches/info_batch.py index 9ce63a3f..62377dc1 100644 --- a/rubin_sim/maf/batches/info_batch.py +++ b/rubin_sim/maf/batches/info_batch.py @@ -61,7 +61,7 @@ def info_bundle_dicts(allsky_slicer, wfd_slicer, opsim="opsim", colmap=batches.c colmap, opsim, slicer=allsky_slicer, - extraSql='night > 365*3.5 and night < 365*4.5 and note not like "%DD%"', + extraSql='night > 365*3.5 and night < 365*4.5 and scheduler_note not like "%DD%"', extraInfoLabel="Yr 3-4", runLength=1, ) @@ -87,7 +87,7 @@ def info_bundle_dicts(allsky_slicer, wfd_slicer, opsim="opsim", colmap=batches.c extraInfoLabel="WFD", ) ) - dd_constraint = "note like '%DD%'" + dd_constraint = "scheduler_note like '%DD%'" bdict.update( batches.nvisitsPerSubset( colmap, diff --git a/rubin_sim/maf/batches/radar_limited.py b/rubin_sim/maf/batches/radar_limited.py index 927ebd74..26c1efd7 100644 --- a/rubin_sim/maf/batches/radar_limited.py +++ b/rubin_sim/maf/batches/radar_limited.py @@ -529,7 +529,7 @@ def radar_limited( for yr_cut in yrs: ptsrc_lim_mag_i_band = mag_cuts[yr_cut] sqlconstraint = "night <= %s" % (yr_cut * 365.25 + 0.5) - sqlconstraint += ' and note not like "DD%"' + sqlconstraint += ' and scheduler_note not like "DD%"' info_label = f"{bandpass} band non-DD year {yr_cut}" ThreebyTwoSummary_simple = metrics.StaticProbesFoMEmulatorMetricSimple( nside=nside, year=yr_cut, metric_name="3x2ptFoM_simple" @@ -581,7 +581,7 @@ def radar_limited( subgroupCount += 1 displayDict["subgroup"] = f"{subgroupCount}: WL" 
displayDict["order"] = 0 - sqlconstraint = 'note not like "DD%" and (filter="g" or filter="r" or filter="i")' + sqlconstraint = 'scheduler_note not like "DD%" and (filter="g" or filter="r" or filter="i")' info_label = "gri band non-DD" minExpTime = 15 m = metrics.WeakLensingNvisits( @@ -612,7 +612,7 @@ def radar_limited( # Do the weak lensing per year for year in [10]: sqlconstraint = ( - 'note not like "DD%"' + 'scheduler_note not like "DD%"' + ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25) ) m = metrics.WeakLensingNvisits( @@ -648,7 +648,7 @@ def radar_limited( bundleList.append(bundle) sqlconstraint = ( - 'note not like "DD%"' + 'scheduler_note not like "DD%"' + ' and (filter="r" or filter="i" or filter="z") and night < %i' % (year * 365.25) ) m = metrics.WeakLensingNvisits( @@ -741,7 +741,7 @@ def radar_limited( bundle = mb.MetricBundle( metric, snslicer, - "note not like '%DD%'", + "scheduler_note not like '%DD%'", plot_dict=plotDict, display_dict=displayDict, info_label="DDF excluded", @@ -925,7 +925,7 @@ def radar_limited( bundle = mb.MetricBundle( metric, kneslicer, - "note not like 'DD%'", + "scheduler_note not like 'DD%'", run_name=runName, info_label="single model", summary_metrics=lightcurve_summary(), @@ -947,7 +947,7 @@ def radar_limited( bundle = mb.MetricBundle( metric_allkne, kneslicer_allkne, - "note not like 'DD%'", + "scheduler_note not like 'DD%'", run_name=runName, info_label="all models", summary_metrics=lightcurve_summary(), diff --git a/rubin_sim/maf/batches/science_radar_batch.py b/rubin_sim/maf/batches/science_radar_batch.py index b1d058a9..f4c12bd0 100644 --- a/rubin_sim/maf/batches/science_radar_batch.py +++ b/rubin_sim/maf/batches/science_radar_batch.py @@ -568,7 +568,7 @@ def science_radar_batch( for yr_cut in yrs: ptsrc_lim_mag_i_band = mag_cuts[yr_cut] sqlconstraint = "night <= %s" % (yr_cut * 365.25 + 0.5) - sqlconstraint += ' and note not like "DD%"' + sqlconstraint += ' and scheduler_note not 
like "DD%"' info_label = f"{bandpass} band non-DD year {yr_cut}" ThreebyTwoSummary_simple = metrics.StaticProbesFoMEmulatorMetricSimple( nside=nside, year=yr_cut, metric_name="3x2ptFoM_simple" @@ -614,7 +614,7 @@ def science_radar_batch( plotDict = {"n_ticks": 5} # Have to include all filters in query to check for filter coverage. # Galaxy numbers calculated using 'bandpass' images only though. - sqlconstraint = 'note not like "DD%"' + sqlconstraint = 'scheduler_note not like "DD%"' info_label = f"{bandpass} band galaxies non-DD" metric = maf.DepthLimitedNumGalMetric( nside=nside, @@ -654,7 +654,7 @@ def science_radar_batch( subgroupCount += 1 displayDict["subgroup"] = f"{subgroupCount}: WL" displayDict["order"] = 0 - sqlconstraint = 'note not like "DD%" and (filter="g" or filter="r" or filter="i")' + sqlconstraint = 'scheduler_note not like "DD%" and (filter="g" or filter="r" or filter="i")' info_label = "gri band non-DD" minExpTime = 15 m = metrics.WeakLensingNvisits( @@ -686,7 +686,7 @@ def science_radar_batch( for year in np.arange(1, 10): displayDict["order"] = year sqlconstraint = ( - 'note not like "DD%"' + 'scheduler_note not like "DD%"' + ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25) ) m = metrics.WeakLensingNvisits( @@ -722,7 +722,7 @@ def science_radar_batch( bundleList.append(bundle) sqlconstraint = ( - 'note not like "DD%"' + 'scheduler_note not like "DD%"' + ' and (filter="r" or filter="i" or filter="z") and night < %i' % (year * 365.25) ) m = metrics.WeakLensingNvisits( @@ -781,7 +781,7 @@ def science_radar_batch( # Kuiper per year in gri and riz for year in np.arange(1, 10): sqlconstraint = ( - 'note not like "DD%"' + 'scheduler_note not like "DD%"' + ' and (filter="g" or filter="r" or filter="i") and night < %i' % (year * 365.25) ) metric1 = metrics.KuiperMetric("rotSkyPos", metric_name="Kuiper_rotSkyPos_gri_year%i" % year) @@ -835,7 +835,7 @@ def science_radar_batch( bundle = mb.MetricBundle( metric, snslicer, - 
"note not like '%DD%'", + "scheduler_note not like '%DD%'", plot_dict=plotDict, display_dict=displayDict, info_label="DDF excluded", @@ -858,7 +858,7 @@ def science_radar_batch( # Calculate the number of expected QSOs, in each band for f in filterlist: - sql = filtersqls[f] + ' and note not like "%DD%"' + sql = filtersqls[f] + ' and scheduler_note not like "%DD%"' md = filterinfo_label[f] + " and non-DD" summaryMetrics = [metrics.SumMetric(metric_name="Total QSO")] zmin = 0.3 @@ -1359,7 +1359,7 @@ def science_radar_batch( bundle = mb.MetricBundle( metric, kneslicer, - "note not like 'DD%'", + "scheduler_note not like 'DD%'", run_name=runName, info_label="single model", summary_metrics=lightcurve_summary(), @@ -1381,7 +1381,7 @@ def science_radar_batch( bundle = mb.MetricBundle( metric_allkne, kneslicer_allkne, - "note not like 'DD%'", + "scheduler_note not like 'DD%'", run_name=runName, info_label="all models", summary_metrics=lightcurve_summary(), diff --git a/rubin_sim/maf/ddf_dir.py b/rubin_sim/maf/ddf_dir.py index 57a272c8..382ad6c0 100755 --- a/rubin_sim/maf/ddf_dir.py +++ b/rubin_sim/maf/ddf_dir.py @@ -21,6 +21,9 @@ def ddf_dir(): parser = argparse.ArgumentParser() parser.add_argument("--db", type=str, default=None) parser.add_argument("--nside", type=int, default=512) + parser.add_argument("--old_coords", dest="old_coords", action="store_true") + parser.set_defaults(verbose=False) + args = parser.parse_args() if args.db is None: @@ -35,7 +38,7 @@ def ddf_dir(): shutil.rmtree(name + "_ddf") bdict = {} - bdict.update(batches.ddfBatch(run_name=name, nside=args.nside)) + bdict.update(batches.ddfBatch(run_name=name, nside=args.nside, old_coords=args.old_coords)) results_db = db.ResultsDb(out_dir=name + "_ddf") group = mb.MetricBundleGroup( bdict, diff --git a/rubin_sim/maf/metrics/sn_n_sn_metric.py b/rubin_sim/maf/metrics/sn_n_sn_metric.py index 21baee15..264c7095 100644 --- a/rubin_sim/maf/metrics/sn_n_sn_metric.py +++ b/rubin_sim/maf/metrics/sn_n_sn_metric.py 
@@ -86,7 +86,7 @@ def __init__( nexp_col="numExposures", vistime_col="visitTime", seeing_col="seeingFwhmEff", - note_col="note", + note_col="scheduler_note", season=[-1], coadd_night=True, zmin=0.1, diff --git a/rubin_sim/maf/metrics/use_metrics.py b/rubin_sim/maf/metrics/use_metrics.py index c3ad366a..2de5cfbb 100644 --- a/rubin_sim/maf/metrics/use_metrics.py +++ b/rubin_sim/maf/metrics/use_metrics.py @@ -8,7 +8,7 @@ class UseMetric(BaseMetric): # pylint: disable=too-few-public-methods """Metric to classify visits by type of visits""" - def __init__(self, note_col="note", **kwargs): + def __init__(self, note_col="scheduler_note", **kwargs): self.note_col = note_col super().__init__(col=[note_col], metric_dtype="object", **kwargs) diff --git a/rubin_sim/maf/plots/special_plotters.py b/rubin_sim/maf/plots/special_plotters.py index c9e579cf..767964bc 100644 --- a/rubin_sim/maf/plots/special_plotters.py +++ b/rubin_sim/maf/plots/special_plotters.py @@ -64,11 +64,13 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): plot_dict["scale"] = hp.nside2pixarea(slicer.nside, degrees=True) / 1000.0 if fig is None: - fig = plt.Figure(figsize=plot_dict["figsize"]) + fig, ax = plt.subplots(figsize=plot_dict["figsize"]) + else: + ax = fig.gca() # Expect metric_value to be something like number of visits cumulative_area = np.arange(1, metric_value.compressed().size + 1)[::-1] * plot_dict["scale"] - plt.plot( + ax.plot( np.sort(metric_value.compressed()), cumulative_area, "k-", @@ -83,13 +85,13 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): f_o_area = metrics.FOArea(col="fO", n_visit=n_visits, norm=False, nside=slicer.nside).run(rarr) f_o_nv = metrics.FONv(col="fO", asky=asky, norm=False, nside=slicer.nside).run(rarr) - plt.axvline(x=n_visits, linewidth=plot_dict["reflinewidth"], color="b", linestyle=":") - plt.axhline(y=asky / 1000.0, linewidth=plot_dict["reflinewidth"], color="r", linestyle=":") + ax.axvline(x=n_visits, linewidth=plot_dict["reflinewidth"], 
color="b", linestyle=":") + ax.axhline(y=asky / 1000.0, linewidth=plot_dict["reflinewidth"], color="r", linestyle=":") # Add lines for nvis_median and f_o_area: # note if these are -666 (badval), they will 'disappear' nvis_median = f_o_nv["value"][np.where(f_o_nv["name"] == "MedianNvis")][0] - plt.axvline( + ax.axvline( x=nvis_median, linewidth=plot_dict["reflinewidth"], color="b", @@ -97,7 +97,7 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): linestyle="-", label=f"f$_0$ Med. Nvis. (@ {asky/1000 :.0f}K sq deg) = {nvis_median :.0f} visits", ) - plt.axhline( + ax.axhline( y=f_o_area / 1000.0, linewidth=plot_dict["reflinewidth"], color="r", @@ -105,20 +105,20 @@ def __call__(self, metric_value, slicer, user_plot_dict, fig=None): linestyle="-", label=f"f$_0$ Area (@ {n_visits :.0f} visits) = {f_o_area/1000 :.01f}K sq deg", ) - plt.legend(loc="upper right", fontsize="small", numpoints=1, framealpha=1.0) + ax.legend(loc="upper right", fontsize="small", numpoints=1, framealpha=1.0) - plt.xlabel(plot_dict["xlabel"]) - plt.ylabel(plot_dict["ylabel"]) - plt.title(plot_dict["title"]) + ax.set_xlabel(plot_dict["xlabel"]) + ax.set_ylabel(plot_dict["ylabel"]) + ax.set_title(plot_dict["title"]) x_min = plot_dict["x_min"] x_max = plot_dict["x_max"] y_min = plot_dict["y_min"] y_max = plot_dict["y_max"] if (x_min is not None) or (x_max is not None): - plt.xlim([x_min, x_max]) + ax.set_xlim([x_min, x_max]) if (y_min is not None) or (y_max is not None): - plt.ylim([y_min, y_max]) + ax.set_ylim([y_min, y_max]) return fig diff --git a/rubin_sim/maf/slicers/time_interval_slicers.py b/rubin_sim/maf/slicers/time_interval_slicers.py index f063d977..1b925f0e 100644 --- a/rubin_sim/maf/slicers/time_interval_slicers.py +++ b/rubin_sim/maf/slicers/time_interval_slicers.py @@ -147,7 +147,7 @@ def __init__( self, mjd_column_name="observationStartMJD", duration_column_name="visitTime", - note_column_name="note", + note_column_name="scheduler_note", badval=np.NaN, 
verbose=False, ): @@ -171,7 +171,7 @@ def setup_slicer(self, sim_data, maps=None): columns={ self.mjd_column_name: "mjd", self.duration_column_name: "duration", - self.note_column_name: "note", + self.note_column_name: "scheduler_note", }, inplace=True, ) diff --git a/rubin_sim/maf/stackers/__init__.py b/rubin_sim/maf/stackers/__init__.py index 97f492ec..e3fef24d 100644 --- a/rubin_sim/maf/stackers/__init__.py +++ b/rubin_sim/maf/stackers/__init__.py @@ -1,7 +1,6 @@ from .base_stacker import * from .coord_stackers import * from .date_stackers import * -from .dither_stackers import * from .general_stackers import * from .get_col_info import * from .label_stackers import * diff --git a/rubin_sim/maf/stackers/coord_stackers.py b/rubin_sim/maf/stackers/coord_stackers.py index b3cf93ac..fec7ab6b 100644 --- a/rubin_sim/maf/stackers/coord_stackers.py +++ b/rubin_sim/maf/stackers/coord_stackers.py @@ -7,7 +7,24 @@ from rubin_scheduler.utils import calc_lmst from .base_stacker import BaseStacker -from .dither_stackers import wrap_ra + + +def wrap_ra(ra): + """ + Wrap only RA values into 0-2pi (using mod). + + Parameters + ---------- + ra : numpy.ndarray + RA in radians + + Returns + ------- + numpy.ndarray + Wrapped RA values, in radians. + """ + ra = ra % (2.0 * np.pi) + return ra def ra_dec2_alt_az(ra, dec, lat, lon, mjd, altonly=False): diff --git a/rubin_sim/maf/stackers/dither_stackers.py b/rubin_sim/maf/stackers/dither_stackers.py deleted file mode 100644 index e43d2c37..00000000 --- a/rubin_sim/maf/stackers/dither_stackers.py +++ /dev/null @@ -1,645 +0,0 @@ -__all__ = ( - "setup_dither_stackers", - "wrap_ra_dec", - "wrap_ra", - "in_hexagon", - "polygon_coords", - "BaseDitherStacker", - "RandomDitherPerVisitStacker", - "RandomDitherPerNightStacker", - "RandomRotDitherPerFilterChangeStacker", -) - - -import warnings - -import numpy as np - -from .base_stacker import BaseStacker - -# Stacker naming scheme: -# [Pattern]DitherPer[Timescale]. 
-# Timescale indicates how often the dither offset is changed. - -# Original dither stackers (Random, Spiral, Hex) written by Lynne Jones -# (lynnej@uw.edu) -# Additional dither stackers written by Humna Awan (humna.awan@rutgers.edu), -# with addition of -# constraining dither offsets to be within an inscribed hexagon -# (code modifications for use here by LJ). - - -def setup_dither_stackers(ra_col, dec_col, degrees, **kwargs): - b = BaseStacker() - stacker_list = [] - if ra_col in b.source_dict: - stacker_list.append(b.source_dict[ra_col](degrees=degrees, **kwargs)) - if dec_col in b.source_dict: - if b.source_dict[ra_col] != b.source_dict[dec_col]: - stacker_list.append(b.source_dict[dec_col](degrees=degrees, **kwargs)) - return stacker_list - - -def wrap_ra_dec(ra, dec): - """ - Wrap RA into 0-2pi and Dec into +/0 pi/2. - - Parameters - ---------- - ra : numpy.ndarray - RA in radians - dec : numpy.ndarray - Dec in radians - - Returns - ------- - numpy.ndarray, numpy.ndarray - Wrapped RA/Dec values, in radians. - """ - # Wrap dec. - low = np.where(dec < -np.pi / 2.0)[0] - dec[low] = -1 * (np.pi + dec[low]) - ra[low] = ra[low] - np.pi - high = np.where(dec > np.pi / 2.0)[0] - dec[high] = np.pi - dec[high] - ra[high] = ra[high] - np.pi - # Wrap RA. - ra = ra % (2.0 * np.pi) - return ra, dec - - -def wrap_ra(ra): - """ - Wrap only RA values into 0-2pi (using mod). - - Parameters - ---------- - ra : numpy.ndarray - RA in radians - - Returns - ------- - numpy.ndarray - Wrapped RA values, in radians. - """ - ra = ra % (2.0 * np.pi) - return ra - - -def in_hexagon(x_off, y_off, max_dither): - """ - Identify dither offsets which fall within the inscribed hexagon. - - Parameters - ---------- - x_off : numpy.ndarray - The x values of the dither offsets. - yoff : numpy.ndarray - The y values of the dither offsets. - max_dither : float - The maximum dither offset. 
- - Returns - ------- - numpy.ndarray - Indexes of the offsets which are within the hexagon - inscribed inside the 'max_dither' radius circle. - """ - # Set up the hexagon limits. - # y = mx + b, 2h is the height. - m = np.sqrt(3.0) - b = m * max_dither - h = m / 2.0 * max_dither - # Identify offsets inside hexagon. - inside = np.where( - (y_off < m * x_off + b) - & (y_off > m * x_off - b) - & (y_off < -m * x_off + b) - & (y_off > -m * x_off - b) - & (y_off < h) - & (y_off > -h) - )[0] - return inside - - -def polygon_coords(nside, radius, rotation_angle): - """ - Find the x,y coords of a polygon. - - This is useful for plotting dither points and showing they lie within - a given shape. - - Parameters - ---------- - nside : int - The number of sides of the polygon - radius : float - The radius within which to plot the polygon - rotation_angle : float - The angle to rotate the polygon to. - - Returns - ------- - [float, float] - List of x/y coordinates of the points describing the polygon. - """ - each_angle = 2 * np.pi / float(nside) - x_coords = np.zeros(nside, float) - y_coords = np.zeros(nside, float) - for i in range(0, nside): - x_coords[i] = np.sin(each_angle * i + rotation_angle) * radius - y_coords[i] = np.cos(each_angle * i + rotation_angle) * radius - return list(zip(x_coords, y_coords)) - - -class BaseDitherStacker(BaseStacker): - """Base class for dither stackers. - - The base class just adds an easy way to define a stacker as - one of the 'dither' types of stackers. - These run first, before any other stackers. - - Parameters - ---------- - ra_col : str, optional - The name of the RA column in the data. - Default 'fieldRA'. - dec_col : str, optional - The name of the Dec column in the data. - Default 'fieldDec'. - degrees : bool, optional - Flag whether RA/Dec should be treated as (and kept as) degrees. - max_dither : float, optional - The radius of the maximum dither offset, in degrees. - Default 1.75 degrees. 
- in_hex : bool, optional - If True, offsets are constrained to lie within a hexagon - inscribed within the max_dither circle. - If False, offsets can lie anywhere out to the edges of - the max_dither circle. - Default True. - """ - - cols_added = [] - - def __init__( - self, - ra_col="fieldRA", - dec_col="fieldDec", - degrees=True, - max_dither=1.75, - in_hex=True, - ): - # Instantiate the RandomDither object and set internal variables. - self.ra_col = ra_col - self.dec_col = dec_col - self.degrees = degrees - # Convert max_dither to radians for internal use. - self.max_dither = np.radians(max_dither) - self.in_hex = in_hex - # self.units used for plot labels - if self.degrees: - self.units = ["deg", "deg"] - else: - self.units = ["rad", "rad"] - # Values required for framework operation: this specifies - # the data columns required from the database. - self.cols_req = [self.ra_col, self.dec_col] - - -class RandomDitherPerVisitStacker(BaseDitherStacker): - """ - Randomly dither the RA and Dec pointings up to max_dither degrees - from center, with a different offset for each visit. - - Parameters - ---------- - ra_col : str, optional - The name of the RA column in the data. - Default 'fieldRA'. - dec_col : str, optional - The name of the Dec column in the data. - Default 'fieldDec'. - degrees : bool, optional - Flag whether RA/Dec should be treated as (and kept as) degrees. - max_dither : float, optional - The radius of the maximum dither offset, in degrees. - Default 1.75 degrees. - in_hex : bool, optional - If True, offsets are constrained to lie within a - hexagon inscribed within the max_dither circle. - If False, offsets can lie anywhere out to the edges - of the max_dither circle. - Default True. - random_seed : int or None, optional - If set, then used as the random seed for the numpy random - number generation for the dither offsets. - Default None. - """ - - # Values required for framework operation: - # this specifies the name of the new columns. 
- cols_added = ["randomDitherPerVisitRa", "randomDitherPerVisitDec"] - - def __init__( - self, - ra_col="fieldRA", - dec_col="fieldDec", - degrees=True, - max_dither=1.75, - in_hex=True, - random_seed=None, - ): - """ - @ MaxDither in degrees - """ - super().__init__( - ra_col=ra_col, - dec_col=dec_col, - degrees=degrees, - max_dither=max_dither, - in_hex=in_hex, - ) - self.random_seed = random_seed - - def _generate_random_offsets(self, noffsets): - x_out = np.array([], float) - y_out = np.array([], float) - max_tries = 100 - tries = 0 - while (len(x_out) < noffsets) and (tries < max_tries): - dithers_rad = np.sqrt(self._rng.rand(noffsets * 2)) * self.max_dither - dithers_theta = self._rng.rand(noffsets * 2) * np.pi * 2.0 - x_off = dithers_rad * np.cos(dithers_theta) - y_off = dithers_rad * np.sin(dithers_theta) - if self.in_hex: - # Constrain dither offsets to be within hexagon. - idx = in_hexagon(x_off, y_off, self.max_dither) - x_off = x_off[idx] - y_off = y_off[idx] - x_out = np.concatenate([x_out, x_off]) - y_out = np.concatenate([y_out, y_off]) - tries += 1 - if len(x_out) < noffsets: - raise ValueError( - "Could not find enough random points within the hexagon in %d tries. " - "Try another random seed?" % (max_tries) - ) - self.x_off = x_out[0:noffsets] - self.y_off = y_out[0:noffsets] - - def _run(self, sim_data, cols_present=False): - if cols_present: - # Column already present in data; assume it is correct - # and does not need recalculating. - return sim_data - # Generate random numbers for dither, - # using defined seed value if desired. - if not hasattr(self, "_rng"): - if self.random_seed is not None: - self._rng = np.random.RandomState(self.random_seed) - else: - self._rng = np.random.RandomState(2178813) - - # Generate the random dither values. - noffsets = len(sim_data[self.ra_col]) - self._generate_random_offsets(noffsets) - # Add to RA and dec values. 
- if self.degrees: - ra = np.radians(sim_data[self.ra_col]) - dec = np.radians(sim_data[self.dec_col]) - else: - ra = sim_data[self.ra_col] - dec = sim_data[self.dec_col] - sim_data["randomDitherPerVisitRa"] = ra + self.x_off / np.cos(dec) - sim_data["randomDitherPerVisitDec"] = dec + self.y_off - # Wrap back into expected range. - ( - sim_data["randomDitherPerVisitRa"], - sim_data["randomDitherPerVisitDec"], - ) = wrap_ra_dec( - sim_data["randomDitherPerVisitRa"], - sim_data["randomDitherPerVisitDec"], - ) - # Convert to degrees - if self.degrees: - for col in self.cols_added: - sim_data[col] = np.degrees(sim_data[col]) - return sim_data - - -class RandomDitherPerNightStacker(RandomDitherPerVisitStacker): - """ - Randomly dither the RA and Dec pointings up to max_dither - degrees from center, one dither offset per night. - All pointings observed within the same night get the same offset. - - Parameters - ---------- - ra_col : str, optional - The name of the RA column in the data. - Default 'fieldRA'. - dec_col : str, optional - The name of the Dec column in the data. - Default 'fieldDec'. - degrees : bool, optional - Flag whether RA/Dec should be treated as (and kept as) degrees. - night_col : str, optional - The name of the night column in the data. - Default 'night'. - max_dither : float, optional - The radius of the maximum dither offset, in degrees. - Default 1.75 degrees. - in_hex : bool, optional - If True, offsets are constrained to lie within a hexagon - inscribed within the max_dither circle. - If False, offsets can lie anywhere out to the edges of the - max_dither circle. - Default True. - random_seed : int or None, optional - If set, then used as the random seed for the numpy random number - generation for the dither offsets. - Default None. - """ - - # Values required for framework operation: this specifies the - # names of the new columns. 
- cols_added = ["randomDitherPerNightRa", "randomDitherPerNightDec"] - - def __init__( - self, - ra_col="fieldRA", - dec_col="fieldDec", - degrees=True, - night_col="night", - max_dither=1.75, - in_hex=True, - random_seed=None, - ): - """ - @ MaxDither in degrees - """ - # Instantiate the RandomDither object and set internal variables. - super().__init__( - ra_col=ra_col, - dec_col=dec_col, - degrees=degrees, - max_dither=max_dither, - in_hex=in_hex, - random_seed=random_seed, - ) - self.night_col = night_col - # Values required for framework operation: - # this specifies the data columns required from the database. - self.cols_req = [self.ra_col, self.dec_col, self.night_col] - - def _run(self, sim_data, cols_present=False): - if cols_present: - return sim_data - # Generate random numbers for dither, - # using defined seed value if desired. - if not hasattr(self, "_rng"): - if self.random_seed is not None: - self._rng = np.random.RandomState(self.random_seed) - else: - self._rng = np.random.RandomState(66334) - - # Generate the random dither values, one per night. - nights = np.unique(sim_data[self.night_col]) - self._generate_random_offsets(len(nights)) - if self.degrees: - ra = np.radians(sim_data[self.ra_col]) - dec = np.radians(sim_data[self.dec_col]) - else: - ra = sim_data[self.ra_col] - dec = sim_data[self.dec_col] - # Add to RA and dec values. - for n, x, y in zip(nights, self.x_off, self.y_off): - match = np.where(sim_data[self.night_col] == n)[0] - sim_data["randomDitherPerNightRa"][match] = ra[match] + x / np.cos(dec[match]) - sim_data["randomDitherPerNightDec"][match] = dec[match] + y - # Wrap RA/Dec into expected range. 
- ( - sim_data["randomDitherPerNightRa"], - sim_data["randomDitherPerNightDec"], - ) = wrap_ra_dec(sim_data["randomDitherPerNightRa"], sim_data["randomDitherPerNightDec"]) - if self.degrees: - for col in self.cols_added: - sim_data[col] = np.degrees(sim_data[col]) - return sim_data - - -class RandomRotDitherPerFilterChangeStacker(BaseDitherStacker): - """ - Randomly dither the physical angle of the telescope rotator wrt the mount, - after every filter change. Visits (in between filter changes) that cannot - all be assigned an offset without surpassing the rotator limit are not - dithered. - - Parameters - ---------- - rot_tel_col : str, optional - The name of the column in the data specifying the physical angle - of the telescope rotator wrt. the mount. - Default: 'rotTelPos'. - filter_col : str, optional - The name of the filter column in the data. - Default: 'filter'. - degrees : `bool`, optional - True if angles in the database are in degrees (default). - If True, returned dithered values are in degrees also. - If False, angles assumed to be in radians and returned in radians. - max_dither : float, optional - Abs(maximum) rotational dither, in degrees. The dithers then will be - between -max_dither to max_dither. - Default: 90 degrees. - max_rot_angle : float, optional - Maximum rotator angle possible for the camera (degrees). - Default 90 degrees. - min_rot_angle : float, optional - Minimum rotator angle possible for the camera (degrees). - Default -90 degrees. - random_seed: int, optional - If set, then used as the random seed for the numpy random number - generation for the dither offsets. - Default: None. - debug: bool, optinal - If True, will print intermediate steps and plots histograms of - rotTelPos for cases when no dither is applied. - Default: False - """ - - # Values required for framework operation: this specifies - # the names of the new columns. 
- cols_added = ["randomDitherPerFilterChangeRotTelPos"] - - def __init__( - self, - rot_tel_col="rotTelPos", - filter_col="filter", - degrees=True, - max_dither=90.0, - max_rot_angle=90, - min_rot_angle=-90, - random_seed=None, - debug=False, - ): - # Instantiate the RandomDither object and set internal variables. - self.rot_tel_col = rot_tel_col - self.filter_col = filter_col - self.degrees = degrees - self.max_dither = max_dither - self.max_rot_angle = max_rot_angle - self.min_rot_angle = min_rot_angle - self.random_seed = random_seed - # self.units used for plot labels - if self.degrees: - self.units = ["deg"] - else: - self.units = ["rad"] - # Convert user-specified values into radians as well. - self.max_dither = np.radians(self.max_dither) - self.max_rot_angle = np.radians(self.max_rot_angle) - self.min_rot_angle = np.radians(self.min_rot_angle) - self.debug = debug - - # Values required for framework operation: - # specify the data columns required from the database. - self.cols_req = [self.rot_tel_col, self.filter_col] - - def _run(self, sim_data, cols_present=False): - if self.debug: - import matplotlib.pyplot as plt - - # Just go ahead and return if the columns were already in place. - if cols_present: - return sim_data - - # Generate random numbers for dither, using defined seed value - # if desired. - # Note that we must define the random state for np.random, - # to ensure consistency in the build system. 
- if not hasattr(self, "_rng"): - if self.random_seed is not None: - self._rng = np.random.RandomState(self.random_seed) - else: - self._rng = np.random.RandomState(544320) - - if len(np.where(sim_data[self.rot_tel_col] > self.max_rot_angle)[0]) > 0: - warnings.warn( - "Input data does not respect the specified maxRotAngle constraint: " - "(Re)Setting maxRotAngle to max value in the input data: %s" % max(sim_data[self.rot_tel_col]) - ) - self.max_rot_angle = max(sim_data[self.rot_tel_col]) - if len(np.where(sim_data[self.rot_tel_col] < self.min_rot_angle)[0]) > 0: - warnings.warn( - "Input data does not respect the specified minRotAngle constraint: " - "(Re)Setting minRotAngle to min value in the input data: %s" % min(sim_data[self.rot_tel_col]) - ) - self.min_rot_angle = min(sim_data[self.rot_tel_col]) - - # Identify points where the filter changes. - change_idxs = np.where(sim_data[self.filter_col][1:] != sim_data[self.filter_col][:-1])[0] - - # Add the random offsets to the RotTelPos values. - rot_dither = self.cols_added[0] - - if len(change_idxs) == 0: - # There are no filter changes, so nothing to dither. - # Just use original values. - sim_data[rot_dither] = sim_data[self.rot_tel_col] - else: - # For each filter change, generate a series of random - # values for the offsets, - # between +/- self.max_dither. These are potential values - # for the rotational offset. - # The offset actually used will be confined to ensure that - # rotTelPos for all visits in - # that set of observations (between filter changes) fall within - # the specified min/maxRotAngle -- without truncating the - # rotTelPos values. - - # Generate more offsets than needed - either 2x filter changes - # or 2500, whichever is bigger. - # 2500 is an arbitrary number. - max_num = max(len(change_idxs) * 2, 2500) - - rot_offset = np.zeros(len(sim_data), float) - # Some sets of visits will not be assigned dithers: - # it was too hard to find an offset. 
- n_problematic_ones = 0 - - # Loop over the filter change indexes (current filter change, - # next filter change) to identify - # sets of visits that should have the same offset. - for c, cn in zip(change_idxs, change_idxs[1:]): - random_offsets = self._rng.rand(max_num + 1) * 2.0 * self.max_dither - self.max_dither - i = 0 - potential_offset = random_offsets[i] - # Calculate new rotTelPos values, if we used this offset. - new_rot_tel = sim_data[self.rot_tel_col][c + 1 : cn + 1] + potential_offset - # Does it work? - # Do all values fall within minRotAngle / maxRotAngle? - good_to_go = (new_rot_tel >= self.min_rot_angle).all() and ( - new_rot_tel <= self.max_rot_angle - ).all() - while (not good_to_go) and (i < max_num): - # break if find a good offset or hit max_num tries. - i += 1 - potential_offset = random_offsets[i] - new_rot_tel = sim_data[self.rot_tel_col][c + 1 : cn + 1] + potential_offset - good_to_go = (new_rot_tel >= self.min_rot_angle).all() and ( - new_rot_tel <= self.max_rot_angle - ).all() - - if not good_to_go: - # i.e. no good offset was found after max_num tries - n_problematic_ones += 1 - rot_offset[c + 1 : cn + 1] = 0.0 - # no dither - else: - rot_offset[c + 1 : cn + 1] = random_offsets[i] - # assign the chosen offset - - # Handle the last set of observations (after the last filter - # change to the end of the survey). 
- random_offsets = self._rng.rand(max_num + 1) * 2.0 * self.max_dither - self.max_dither - i = 0 - potential_offset = random_offsets[i] - new_rot_tel = sim_data[self.rot_tel_col][change_idxs[-1] + 1 :] + potential_offset - good_to_go = (new_rot_tel >= self.min_rot_angle).all() and ( - new_rot_tel <= self.max_rot_angle - ).all() - while (not good_to_go) and (i < max_num): - # break if find a good offset or cant (after max_num tries) - i += 1 - potential_offset = random_offsets[i] - new_rot_tel = sim_data[self.rot_tel_col][change_idxs[-1] + 1 :] + potential_offset - good_to_go = (new_rot_tel >= self.min_rot_angle).all() and ( - new_rot_tel <= self.max_rot_angle - ).all() - - if not good_to_go: - # i.e. no good offset was found after max_num tries - n_problematic_ones += 1 - rot_offset[c + 1 : cn + 1] = 0.0 - else: - rot_offset[change_idxs[-1] + 1 :] = potential_offset - - # Assign the dithers - sim_data[rot_dither] = sim_data[self.rot_tel_col] + rot_offset - - # Final check to make sure things are okay - good_to_go = (sim_data[rot_dither] >= self.min_rot_angle).all() and ( - sim_data[rot_dither] <= self.max_rot_angle - ).all() - if not good_to_go: - message = "Rotational offsets are not working properly:\n" - message += " dithered rotTelPos: %s\n" % (sim_data[rot_dither]) - message += " minRotAngle: %s ; maxRotAngle: %s" % ( - self.min_rot_angle, - self.max_rot_angle, - ) - raise ValueError(message) - else: - return sim_data diff --git a/rubin_sim/maf/stackers/label_stackers.py b/rubin_sim/maf/stackers/label_stackers.py index bed625e1..84c7e929 100644 --- a/rubin_sim/maf/stackers/label_stackers.py +++ b/rubin_sim/maf/stackers/label_stackers.py @@ -47,7 +47,7 @@ def __init__( area_id_name="WFD", ra_col="fieldRA", dec_col="fieldDec", - note_col="note", + note_col="scheduler_note", exclude_dd=True, ): self.ra_col = ra_col diff --git a/rubin_sim/maf/stackers/sdss_stackers.py b/rubin_sim/maf/stackers/sdss_stackers.py index 3304ff06..1392482e 100644 --- 
a/rubin_sim/maf/stackers/sdss_stackers.py +++ b/rubin_sim/maf/stackers/sdss_stackers.py @@ -4,7 +4,7 @@ import numpy as np from .base_stacker import BaseStacker -from .dither_stackers import wrap_ra +from .coord_stackers import wrap_ra class SdssRADecStacker(BaseStacker): diff --git a/rubin_sim/phot_utils/sed.py b/rubin_sim/phot_utils/sed.py index 6ad8c06f..248ecec2 100644 --- a/rubin_sim/phot_utils/sed.py +++ b/rubin_sim/phot_utils/sed.py @@ -375,7 +375,7 @@ def cache_lsst_seds(wavelen_min=None, wavelen_max=None, cache_dir=None): class Sed: """ "Hold and use spectral energy distributions (SEDs)""" - def __init__(self, wavelen=None, flambda=None, fnu=None, badval=numpy.NaN, name=None): + def __init__(self, wavelen=None, flambda=None, fnu=None, badval=numpy.nan, name=None): """ Initialize sed object by giving filename or lambda/flambda array. @@ -798,7 +798,7 @@ def resample_sed( # Do the interpolation of wavelen/flux onto grid. # (type/len failures will die here). if wavelen[0] > wavelen_grid[0] or wavelen[-1] < wavelen_grid[-1]: - f = interpolate.interp1d(wavelen, flux, bounds_error=False, fill_value=numpy.NaN) + f = interpolate.interp1d(wavelen, flux, bounds_error=False, fill_value=numpy.nan) flux_grid = f(wavelen_grid) else: flux_grid = numpy.interp(wavelen_grid, wavelen, flux) diff --git a/tests/maf/test_stackers.py b/tests/maf/test_stackers.py index f15b5bdf..eb93db93 100644 --- a/tests/maf/test_stackers.py +++ b/tests/maf/test_stackers.py @@ -147,106 +147,6 @@ def _t_dither_per_night(self, diffsra, diffsdec, ra, dec, nights): self.assertAlmostEqual(dra_on_night.max(), 0) self.assertAlmostEqual(ddec_on_night.max(), 0) - def test_random_dither(self): - """ - Test the random dither pattern. - """ - max_dither = 0.5 - data = np.zeros(600, dtype=list(zip(["fieldRA", "fieldDec"], [float, float]))) - # Set seed so the test is stable - rng = np.random.RandomState(42) - # Restrict dithers to area where wraparound is not a problem for - # comparisons. 
- data["fieldRA"] = np.degrees(rng.random_sample(600) * (np.pi) + np.pi / 2.0) - data["fieldDec"] = np.degrees(rng.random_sample(600) * np.pi / 2.0 - np.pi / 4.0) - stacker = stackers.RandomDitherPerVisitStacker(max_dither=max_dither) - data = stacker.run(data) - diffsra = (data["fieldRA"] - data["randomDitherPerVisitRa"]) * np.cos(np.radians(data["fieldDec"])) - diffsdec = data["fieldDec"] - data["randomDitherPerVisitDec"] - # Check dithers within expected range. - self._t_dither_range(diffsra, diffsdec, data["fieldRA"], data["fieldDec"], max_dither) - - def test_random_dither_per_night(self): - """ - Test the per-night random dither pattern. - """ - max_dither = 0.5 - ndata = 600 - # Set seed so the test is stable - rng = np.random.RandomState(42) - - data = np.zeros( - ndata, - dtype=list( - zip( - ["fieldRA", "fieldDec", "fieldId", "night"], - [float, float, int, int], - ) - ), - ) - data["fieldRA"] = rng.rand(ndata) * (np.pi) + np.pi / 2.0 - data["fieldDec"] = rng.rand(ndata) * np.pi / 2.0 - np.pi / 4.0 - data["night"] = np.floor(rng.rand(ndata) * 10).astype("int") - stacker = stackers.RandomDitherPerNightStacker(max_dither=max_dither) - data = stacker.run(data) - diffsra = (np.radians(data["fieldRA"]) - np.radians(data["randomDitherPerNightRa"])) * np.cos( - np.radians(data["fieldDec"]) - ) - diffsdec = np.radians(data["fieldDec"]) - np.radians(data["randomDitherPerNightDec"]) - self._t_dither_range(diffsra, diffsdec, data["fieldRA"], data["fieldDec"], max_dither) - # Check that dithers on the same night are the same. - self._t_dither_per_night(diffsra, diffsdec, data["fieldRA"], data["fieldDec"], data["night"]) - - def test_random_rot_dither_per_filter_change_stacker(self): - """ - Test the rotational dither stacker. - """ - max_dither = 90 - filt = np.array(["r", "r", "r", "g", "g", "g", "r", "r"]) - rot_tel_pos = np.array([0, 0, 0, 0, 0, 0, 0, 0], float) - # Test that have a dither in rot offset for every filter change. 
- odata = np.zeros(len(filt), dtype=list(zip(["filter", "rotTelPos"], [(np.str_, 1), float]))) - odata["filter"] = filt - odata["rotTelPos"] = rot_tel_pos - stacker = stackers.RandomRotDitherPerFilterChangeStacker( - max_dither=max_dither, degrees=True, random_seed=99 - ) - data = stacker.run(odata) # run the stacker - random_dithers = data["randomDitherPerFilterChangeRotTelPos"] - # Check that first three visits have the same rot_tel_pos, etc. - rot_offsets = rot_tel_pos - random_dithers - # no dither w/o a filter change - self.assertEqual(rot_offsets[0], 0) - offset_changes = np.where(rot_offsets[1:] != rot_offsets[:-1])[0] - filt_changes = np.where(filt[1:] != filt[:-1])[0] - # dither after every filter - np.testing.assert_array_equal(offset_changes, filt_changes) - - # now test to ensure that user-defined max_rot_angle value works - # and that visits in between filter changes for which no offset - # can be found are left undithered - # (g band visits span rotator range, so can't be dithered) - gvisits = np.where(filt == "g") - maxrot = 30 - rot_tel_pos[gvisits[0][0]] = -maxrot - rot_tel_pos[gvisits[0][-1]] = maxrot - odata["rotTelPos"] = rot_tel_pos - stacker = stackers.RandomRotDitherPerFilterChangeStacker( - max_dither=max_dither, - degrees=True, - min_rot_angle=-maxrot, - max_rot_angle=maxrot, - random_seed=19231, - ) - data = stacker.run(odata) - random_dithers = data["randomDitherPerFilterChangeRotTelPos"] - # Check that we respected the range. - self.assertEqual(random_dithers.max(), 30) - self.assertEqual(random_dithers.min(), -30) - # Check that g band visits weren't dithered. - rot_offsets = rot_tel_pos - random_dithers - self.assertEqual(rot_offsets[gvisits].all(), 0) - def test_ha_stacker(self): """Test the Hour Angle stacker""" data = np.zeros(100, dtype=list(zip(["observationStartLST", "fieldRA"], [float, float])))