From c8d77218944efe00ed0a697a3a3c3785a379b10c Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 16 Oct 2024 20:33:16 -0700 Subject: [PATCH 01/27] move unused code --- .coveragerc | 28 ----------- {bin => deprecated/bin}/desi_qa_exposure | 0 {bin => deprecated/bin}/desi_qa_frame | 0 {bin => deprecated/bin}/desi_qa_night | 0 {bin => deprecated/bin}/desi_qa_prod | 0 {bin => deprecated/bin}/desi_qa_skyresid | 0 {py => deprecated/py}/desispec/io/qa.py | 0 .../py}/desispec/pipeline/__init__.py | 0 .../py}/desispec/pipeline/control.py | 0 {py => deprecated/py}/desispec/pipeline/db.py | 0 .../py}/desispec/pipeline/defs.py | 0 .../py}/desispec/pipeline/plan.py | 0 .../py}/desispec/pipeline/prod.py | 0 .../py}/desispec/pipeline/run.py | 0 .../py}/desispec/pipeline/scriptgen.py | 0 .../py}/desispec/pipeline/tasks/__init__.py | 0 .../py}/desispec/pipeline/tasks/base.py | 0 .../py}/desispec/pipeline/tasks/cframe.py | 0 .../py}/desispec/pipeline/tasks/extract.py | 0 .../py}/desispec/pipeline/tasks/fiberflat.py | 0 .../desispec/pipeline/tasks/fiberflatnight.py | 0 .../py}/desispec/pipeline/tasks/fibermap.py | 0 .../py}/desispec/pipeline/tasks/fluxcalib.py | 0 .../py}/desispec/pipeline/tasks/preproc.py | 0 .../py}/desispec/pipeline/tasks/psf.py | 0 .../py}/desispec/pipeline/tasks/psfnight.py | 0 .../py}/desispec/pipeline/tasks/qadata.py | 0 .../py}/desispec/pipeline/tasks/rawdata.py | 0 .../py}/desispec/pipeline/tasks/redshift.py | 0 .../py}/desispec/pipeline/tasks/sky.py | 0 .../py}/desispec/pipeline/tasks/spectra.py | 0 .../py}/desispec/pipeline/tasks/starfit.py | 0 .../py}/desispec/pipeline/tasks/traceshift.py | 0 {py => deprecated/py}/desispec/qa/__init__.py | 0 {py => deprecated/py}/desispec/qa/html.py | 0 {py => deprecated/py}/desispec/qa/qa_brick.py | 0 .../py}/desispec/qa/qa_exposure.py | 0 {py => deprecated/py}/desispec/qa/qa_frame.py | 0 .../py}/desispec/qa/qa_multiexp.py | 0 {py => deprecated/py}/desispec/qa/qa_night.py | 0 {py => deprecated/py}/desispec/qa/qa_plots.py | 0 .../py}/desispec/qa/qa_plots_ql.py | 0 {py => deprecated/py}/desispec/qa/qa_prod.py | 0 .../py}/desispec/qa/qa_quicklook.py | 0 {py => deprecated/py}/desispec/qa/qalib.py | 0 {py => deprecated/py}/desispec/qa/utils.py | 0 .../py}/desispec/quicklook/__init__.py | 0 .../py}/desispec/quicklook/arcprocess.py | 0 .../py}/desispec/quicklook/merger.py | 0 .../py}/desispec/quicklook/palib.py | 0 .../py}/desispec/quicklook/pas.py | 0 .../py}/desispec/quicklook/procalgs.py | 0 .../py}/desispec/quicklook/qas.py | 0 .../py}/desispec/quicklook/ql_plotlib.py | 0 .../py}/desispec/quicklook/qlboxcar.py | 0 .../py}/desispec/quicklook/qlconfig.py | 0 .../py}/desispec/quicklook/qlexceptions.py | 0 .../py}/desispec/quicklook/qlheartbeat.py | 0 .../py}/desispec/quicklook/qllogger.py | 0 .../py}/desispec/quicklook/qlpsf.py | 0 .../py}/desispec/quicklook/qlresolution.py | 0 .../py}/desispec/quicklook/quickfiberflat.py | 0 .../py}/desispec/quicklook/quicklook.py | 0 .../py}/desispec/quicklook/quicksky.py | 0 .../py}/desispec/scripts/qa_exposure.py | 0 .../py}/desispec/scripts/qa_frame.py | 0 .../py}/desispec/scripts/qa_night.py | 0 .../py}/desispec/scripts/qa_prod.py | 0 .../py}/desispec/scripts/skysubresid.py | 0 .../py}/desispec/test/integration_test.py | 0 .../py}/desispec/test/old_integration_test.py | 0 .../py}/desispec/test/test_qa.py | 0 .../py}/desispec/test/test_ql_pa.py | 0 .../py}/desispec/test/test_ql_qa.py | 0 .../py}/desispec/test/test_qlextract.py | 0 py/desispec/io/__init__.py | 5 +- py/desispec/io/meta.py | 47 
++----------------- py/desispec/scripts/fiberflat.py | 22 --------- py/desispec/scripts/sky.py | 22 --------- py/desispec/test/test_binscripts.py | 23 ++++----- py/desispec/test/test_bootcalib.py | 36 ++++++-------- py/desispec/test/test_io.py | 23 --------- 82 files changed, 28 insertions(+), 178 deletions(-) delete mode 100644 .coveragerc rename {bin => deprecated/bin}/desi_qa_exposure (100%) rename {bin => deprecated/bin}/desi_qa_frame (100%) rename {bin => deprecated/bin}/desi_qa_night (100%) rename {bin => deprecated/bin}/desi_qa_prod (100%) rename {bin => deprecated/bin}/desi_qa_skyresid (100%) rename {py => deprecated/py}/desispec/io/qa.py (100%) rename {py => deprecated/py}/desispec/pipeline/__init__.py (100%) rename {py => deprecated/py}/desispec/pipeline/control.py (100%) rename {py => deprecated/py}/desispec/pipeline/db.py (100%) rename {py => deprecated/py}/desispec/pipeline/defs.py (100%) rename {py => deprecated/py}/desispec/pipeline/plan.py (100%) rename {py => deprecated/py}/desispec/pipeline/prod.py (100%) rename {py => deprecated/py}/desispec/pipeline/run.py (100%) rename {py => deprecated/py}/desispec/pipeline/scriptgen.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/__init__.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/base.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/cframe.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/extract.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/fiberflat.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/fiberflatnight.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/fibermap.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/fluxcalib.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/preproc.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/psf.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/psfnight.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/qadata.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/rawdata.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/redshift.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/sky.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/spectra.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/starfit.py (100%) rename {py => deprecated/py}/desispec/pipeline/tasks/traceshift.py (100%) rename {py => deprecated/py}/desispec/qa/__init__.py (100%) rename {py => deprecated/py}/desispec/qa/html.py (100%) rename {py => deprecated/py}/desispec/qa/qa_brick.py (100%) rename {py => deprecated/py}/desispec/qa/qa_exposure.py (100%) rename {py => deprecated/py}/desispec/qa/qa_frame.py (100%) rename {py => deprecated/py}/desispec/qa/qa_multiexp.py (100%) rename {py => deprecated/py}/desispec/qa/qa_night.py (100%) rename {py => deprecated/py}/desispec/qa/qa_plots.py (100%) rename {py => deprecated/py}/desispec/qa/qa_plots_ql.py (100%) rename {py => deprecated/py}/desispec/qa/qa_prod.py (100%) rename {py => deprecated/py}/desispec/qa/qa_quicklook.py (100%) rename {py => deprecated/py}/desispec/qa/qalib.py (100%) rename {py => deprecated/py}/desispec/qa/utils.py (100%) rename {py => deprecated/py}/desispec/quicklook/__init__.py (100%) rename {py => deprecated/py}/desispec/quicklook/arcprocess.py (100%) rename {py => deprecated/py}/desispec/quicklook/merger.py (100%) rename {py => deprecated/py}/desispec/quicklook/palib.py (100%) rename {py => deprecated/py}/desispec/quicklook/pas.py 
(100%) rename {py => deprecated/py}/desispec/quicklook/procalgs.py (100%) rename {py => deprecated/py}/desispec/quicklook/qas.py (100%) rename {py => deprecated/py}/desispec/quicklook/ql_plotlib.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlboxcar.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlconfig.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlexceptions.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlheartbeat.py (100%) rename {py => deprecated/py}/desispec/quicklook/qllogger.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlpsf.py (100%) rename {py => deprecated/py}/desispec/quicklook/qlresolution.py (100%) rename {py => deprecated/py}/desispec/quicklook/quickfiberflat.py (100%) rename {py => deprecated/py}/desispec/quicklook/quicklook.py (100%) rename {py => deprecated/py}/desispec/quicklook/quicksky.py (100%) rename {py => deprecated/py}/desispec/scripts/qa_exposure.py (100%) rename {py => deprecated/py}/desispec/scripts/qa_frame.py (100%) rename {py => deprecated/py}/desispec/scripts/qa_night.py (100%) rename {py => deprecated/py}/desispec/scripts/qa_prod.py (100%) rename {py => deprecated/py}/desispec/scripts/skysubresid.py (100%) rename {py => deprecated/py}/desispec/test/integration_test.py (100%) rename {py => deprecated/py}/desispec/test/old_integration_test.py (100%) rename {py => deprecated/py}/desispec/test/test_qa.py (100%) rename {py => deprecated/py}/desispec/test/test_ql_pa.py (100%) rename {py => deprecated/py}/desispec/test/test_ql_qa.py (100%) rename {py => deprecated/py}/desispec/test/test_qlextract.py (100%) diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 7154e2f80..000000000 --- a/.coveragerc +++ /dev/null @@ -1,28 +0,0 @@ -[run] -source = py/desispec -omit = - py/desispec/_version.py - py/desispec/conftest* - py/desispec/cython_version* - py/desispec/setup_package* - py/desispec/*/setup_package* - py/desispec/*/*/setup_package* - py/desispec/sphinx/* - py/desispec/test/* - py/desispec/*/test/* - py/desispec/*/*/test/* - -[report] -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about packages we have installed - except ImportError - - # Don't complain if tests don't hit assertions - raise AssertionError - raise NotImplementedError - - # Don't complain about script hooks - ### def main\(.*\): diff --git a/bin/desi_qa_exposure b/deprecated/bin/desi_qa_exposure similarity index 100% rename from bin/desi_qa_exposure rename to deprecated/bin/desi_qa_exposure diff --git a/bin/desi_qa_frame b/deprecated/bin/desi_qa_frame similarity index 100% rename from bin/desi_qa_frame rename to deprecated/bin/desi_qa_frame diff --git a/bin/desi_qa_night b/deprecated/bin/desi_qa_night similarity index 100% rename from bin/desi_qa_night rename to deprecated/bin/desi_qa_night diff --git a/bin/desi_qa_prod b/deprecated/bin/desi_qa_prod similarity index 100% rename from bin/desi_qa_prod rename to deprecated/bin/desi_qa_prod diff --git a/bin/desi_qa_skyresid b/deprecated/bin/desi_qa_skyresid similarity index 100% rename from bin/desi_qa_skyresid rename to deprecated/bin/desi_qa_skyresid diff --git a/py/desispec/io/qa.py b/deprecated/py/desispec/io/qa.py similarity index 100% rename from py/desispec/io/qa.py rename to deprecated/py/desispec/io/qa.py diff --git a/py/desispec/pipeline/__init__.py b/deprecated/py/desispec/pipeline/__init__.py similarity index 100% rename from py/desispec/pipeline/__init__.py rename to deprecated/py/desispec/pipeline/__init__.py 
diff --git a/py/desispec/pipeline/control.py b/deprecated/py/desispec/pipeline/control.py similarity index 100% rename from py/desispec/pipeline/control.py rename to deprecated/py/desispec/pipeline/control.py diff --git a/py/desispec/pipeline/db.py b/deprecated/py/desispec/pipeline/db.py similarity index 100% rename from py/desispec/pipeline/db.py rename to deprecated/py/desispec/pipeline/db.py diff --git a/py/desispec/pipeline/defs.py b/deprecated/py/desispec/pipeline/defs.py similarity index 100% rename from py/desispec/pipeline/defs.py rename to deprecated/py/desispec/pipeline/defs.py diff --git a/py/desispec/pipeline/plan.py b/deprecated/py/desispec/pipeline/plan.py similarity index 100% rename from py/desispec/pipeline/plan.py rename to deprecated/py/desispec/pipeline/plan.py diff --git a/py/desispec/pipeline/prod.py b/deprecated/py/desispec/pipeline/prod.py similarity index 100% rename from py/desispec/pipeline/prod.py rename to deprecated/py/desispec/pipeline/prod.py diff --git a/py/desispec/pipeline/run.py b/deprecated/py/desispec/pipeline/run.py similarity index 100% rename from py/desispec/pipeline/run.py rename to deprecated/py/desispec/pipeline/run.py diff --git a/py/desispec/pipeline/scriptgen.py b/deprecated/py/desispec/pipeline/scriptgen.py similarity index 100% rename from py/desispec/pipeline/scriptgen.py rename to deprecated/py/desispec/pipeline/scriptgen.py diff --git a/py/desispec/pipeline/tasks/__init__.py b/deprecated/py/desispec/pipeline/tasks/__init__.py similarity index 100% rename from py/desispec/pipeline/tasks/__init__.py rename to deprecated/py/desispec/pipeline/tasks/__init__.py diff --git a/py/desispec/pipeline/tasks/base.py b/deprecated/py/desispec/pipeline/tasks/base.py similarity index 100% rename from py/desispec/pipeline/tasks/base.py rename to deprecated/py/desispec/pipeline/tasks/base.py diff --git a/py/desispec/pipeline/tasks/cframe.py b/deprecated/py/desispec/pipeline/tasks/cframe.py similarity index 100% rename from py/desispec/pipeline/tasks/cframe.py rename to deprecated/py/desispec/pipeline/tasks/cframe.py diff --git a/py/desispec/pipeline/tasks/extract.py b/deprecated/py/desispec/pipeline/tasks/extract.py similarity index 100% rename from py/desispec/pipeline/tasks/extract.py rename to deprecated/py/desispec/pipeline/tasks/extract.py diff --git a/py/desispec/pipeline/tasks/fiberflat.py b/deprecated/py/desispec/pipeline/tasks/fiberflat.py similarity index 100% rename from py/desispec/pipeline/tasks/fiberflat.py rename to deprecated/py/desispec/pipeline/tasks/fiberflat.py diff --git a/py/desispec/pipeline/tasks/fiberflatnight.py b/deprecated/py/desispec/pipeline/tasks/fiberflatnight.py similarity index 100% rename from py/desispec/pipeline/tasks/fiberflatnight.py rename to deprecated/py/desispec/pipeline/tasks/fiberflatnight.py diff --git a/py/desispec/pipeline/tasks/fibermap.py b/deprecated/py/desispec/pipeline/tasks/fibermap.py similarity index 100% rename from py/desispec/pipeline/tasks/fibermap.py rename to deprecated/py/desispec/pipeline/tasks/fibermap.py diff --git a/py/desispec/pipeline/tasks/fluxcalib.py b/deprecated/py/desispec/pipeline/tasks/fluxcalib.py similarity index 100% rename from py/desispec/pipeline/tasks/fluxcalib.py rename to deprecated/py/desispec/pipeline/tasks/fluxcalib.py diff --git a/py/desispec/pipeline/tasks/preproc.py b/deprecated/py/desispec/pipeline/tasks/preproc.py similarity index 100% rename from py/desispec/pipeline/tasks/preproc.py rename to deprecated/py/desispec/pipeline/tasks/preproc.py diff --git 
a/py/desispec/pipeline/tasks/psf.py b/deprecated/py/desispec/pipeline/tasks/psf.py similarity index 100% rename from py/desispec/pipeline/tasks/psf.py rename to deprecated/py/desispec/pipeline/tasks/psf.py diff --git a/py/desispec/pipeline/tasks/psfnight.py b/deprecated/py/desispec/pipeline/tasks/psfnight.py similarity index 100% rename from py/desispec/pipeline/tasks/psfnight.py rename to deprecated/py/desispec/pipeline/tasks/psfnight.py diff --git a/py/desispec/pipeline/tasks/qadata.py b/deprecated/py/desispec/pipeline/tasks/qadata.py similarity index 100% rename from py/desispec/pipeline/tasks/qadata.py rename to deprecated/py/desispec/pipeline/tasks/qadata.py diff --git a/py/desispec/pipeline/tasks/rawdata.py b/deprecated/py/desispec/pipeline/tasks/rawdata.py similarity index 100% rename from py/desispec/pipeline/tasks/rawdata.py rename to deprecated/py/desispec/pipeline/tasks/rawdata.py diff --git a/py/desispec/pipeline/tasks/redshift.py b/deprecated/py/desispec/pipeline/tasks/redshift.py similarity index 100% rename from py/desispec/pipeline/tasks/redshift.py rename to deprecated/py/desispec/pipeline/tasks/redshift.py diff --git a/py/desispec/pipeline/tasks/sky.py b/deprecated/py/desispec/pipeline/tasks/sky.py similarity index 100% rename from py/desispec/pipeline/tasks/sky.py rename to deprecated/py/desispec/pipeline/tasks/sky.py diff --git a/py/desispec/pipeline/tasks/spectra.py b/deprecated/py/desispec/pipeline/tasks/spectra.py similarity index 100% rename from py/desispec/pipeline/tasks/spectra.py rename to deprecated/py/desispec/pipeline/tasks/spectra.py diff --git a/py/desispec/pipeline/tasks/starfit.py b/deprecated/py/desispec/pipeline/tasks/starfit.py similarity index 100% rename from py/desispec/pipeline/tasks/starfit.py rename to deprecated/py/desispec/pipeline/tasks/starfit.py diff --git a/py/desispec/pipeline/tasks/traceshift.py b/deprecated/py/desispec/pipeline/tasks/traceshift.py similarity index 100% rename from py/desispec/pipeline/tasks/traceshift.py rename to deprecated/py/desispec/pipeline/tasks/traceshift.py diff --git a/py/desispec/qa/__init__.py b/deprecated/py/desispec/qa/__init__.py similarity index 100% rename from py/desispec/qa/__init__.py rename to deprecated/py/desispec/qa/__init__.py diff --git a/py/desispec/qa/html.py b/deprecated/py/desispec/qa/html.py similarity index 100% rename from py/desispec/qa/html.py rename to deprecated/py/desispec/qa/html.py diff --git a/py/desispec/qa/qa_brick.py b/deprecated/py/desispec/qa/qa_brick.py similarity index 100% rename from py/desispec/qa/qa_brick.py rename to deprecated/py/desispec/qa/qa_brick.py diff --git a/py/desispec/qa/qa_exposure.py b/deprecated/py/desispec/qa/qa_exposure.py similarity index 100% rename from py/desispec/qa/qa_exposure.py rename to deprecated/py/desispec/qa/qa_exposure.py diff --git a/py/desispec/qa/qa_frame.py b/deprecated/py/desispec/qa/qa_frame.py similarity index 100% rename from py/desispec/qa/qa_frame.py rename to deprecated/py/desispec/qa/qa_frame.py diff --git a/py/desispec/qa/qa_multiexp.py b/deprecated/py/desispec/qa/qa_multiexp.py similarity index 100% rename from py/desispec/qa/qa_multiexp.py rename to deprecated/py/desispec/qa/qa_multiexp.py diff --git a/py/desispec/qa/qa_night.py b/deprecated/py/desispec/qa/qa_night.py similarity index 100% rename from py/desispec/qa/qa_night.py rename to deprecated/py/desispec/qa/qa_night.py diff --git a/py/desispec/qa/qa_plots.py b/deprecated/py/desispec/qa/qa_plots.py similarity index 100% rename from py/desispec/qa/qa_plots.py rename to 
deprecated/py/desispec/qa/qa_plots.py diff --git a/py/desispec/qa/qa_plots_ql.py b/deprecated/py/desispec/qa/qa_plots_ql.py similarity index 100% rename from py/desispec/qa/qa_plots_ql.py rename to deprecated/py/desispec/qa/qa_plots_ql.py diff --git a/py/desispec/qa/qa_prod.py b/deprecated/py/desispec/qa/qa_prod.py similarity index 100% rename from py/desispec/qa/qa_prod.py rename to deprecated/py/desispec/qa/qa_prod.py diff --git a/py/desispec/qa/qa_quicklook.py b/deprecated/py/desispec/qa/qa_quicklook.py similarity index 100% rename from py/desispec/qa/qa_quicklook.py rename to deprecated/py/desispec/qa/qa_quicklook.py diff --git a/py/desispec/qa/qalib.py b/deprecated/py/desispec/qa/qalib.py similarity index 100% rename from py/desispec/qa/qalib.py rename to deprecated/py/desispec/qa/qalib.py diff --git a/py/desispec/qa/utils.py b/deprecated/py/desispec/qa/utils.py similarity index 100% rename from py/desispec/qa/utils.py rename to deprecated/py/desispec/qa/utils.py diff --git a/py/desispec/quicklook/__init__.py b/deprecated/py/desispec/quicklook/__init__.py similarity index 100% rename from py/desispec/quicklook/__init__.py rename to deprecated/py/desispec/quicklook/__init__.py diff --git a/py/desispec/quicklook/arcprocess.py b/deprecated/py/desispec/quicklook/arcprocess.py similarity index 100% rename from py/desispec/quicklook/arcprocess.py rename to deprecated/py/desispec/quicklook/arcprocess.py diff --git a/py/desispec/quicklook/merger.py b/deprecated/py/desispec/quicklook/merger.py similarity index 100% rename from py/desispec/quicklook/merger.py rename to deprecated/py/desispec/quicklook/merger.py diff --git a/py/desispec/quicklook/palib.py b/deprecated/py/desispec/quicklook/palib.py similarity index 100% rename from py/desispec/quicklook/palib.py rename to deprecated/py/desispec/quicklook/palib.py diff --git a/py/desispec/quicklook/pas.py b/deprecated/py/desispec/quicklook/pas.py similarity index 100% rename from py/desispec/quicklook/pas.py rename to deprecated/py/desispec/quicklook/pas.py diff --git a/py/desispec/quicklook/procalgs.py b/deprecated/py/desispec/quicklook/procalgs.py similarity index 100% rename from py/desispec/quicklook/procalgs.py rename to deprecated/py/desispec/quicklook/procalgs.py diff --git a/py/desispec/quicklook/qas.py b/deprecated/py/desispec/quicklook/qas.py similarity index 100% rename from py/desispec/quicklook/qas.py rename to deprecated/py/desispec/quicklook/qas.py diff --git a/py/desispec/quicklook/ql_plotlib.py b/deprecated/py/desispec/quicklook/ql_plotlib.py similarity index 100% rename from py/desispec/quicklook/ql_plotlib.py rename to deprecated/py/desispec/quicklook/ql_plotlib.py diff --git a/py/desispec/quicklook/qlboxcar.py b/deprecated/py/desispec/quicklook/qlboxcar.py similarity index 100% rename from py/desispec/quicklook/qlboxcar.py rename to deprecated/py/desispec/quicklook/qlboxcar.py diff --git a/py/desispec/quicklook/qlconfig.py b/deprecated/py/desispec/quicklook/qlconfig.py similarity index 100% rename from py/desispec/quicklook/qlconfig.py rename to deprecated/py/desispec/quicklook/qlconfig.py diff --git a/py/desispec/quicklook/qlexceptions.py b/deprecated/py/desispec/quicklook/qlexceptions.py similarity index 100% rename from py/desispec/quicklook/qlexceptions.py rename to deprecated/py/desispec/quicklook/qlexceptions.py diff --git a/py/desispec/quicklook/qlheartbeat.py b/deprecated/py/desispec/quicklook/qlheartbeat.py similarity index 100% rename from py/desispec/quicklook/qlheartbeat.py rename to 
deprecated/py/desispec/quicklook/qlheartbeat.py diff --git a/py/desispec/quicklook/qllogger.py b/deprecated/py/desispec/quicklook/qllogger.py similarity index 100% rename from py/desispec/quicklook/qllogger.py rename to deprecated/py/desispec/quicklook/qllogger.py diff --git a/py/desispec/quicklook/qlpsf.py b/deprecated/py/desispec/quicklook/qlpsf.py similarity index 100% rename from py/desispec/quicklook/qlpsf.py rename to deprecated/py/desispec/quicklook/qlpsf.py diff --git a/py/desispec/quicklook/qlresolution.py b/deprecated/py/desispec/quicklook/qlresolution.py similarity index 100% rename from py/desispec/quicklook/qlresolution.py rename to deprecated/py/desispec/quicklook/qlresolution.py diff --git a/py/desispec/quicklook/quickfiberflat.py b/deprecated/py/desispec/quicklook/quickfiberflat.py similarity index 100% rename from py/desispec/quicklook/quickfiberflat.py rename to deprecated/py/desispec/quicklook/quickfiberflat.py diff --git a/py/desispec/quicklook/quicklook.py b/deprecated/py/desispec/quicklook/quicklook.py similarity index 100% rename from py/desispec/quicklook/quicklook.py rename to deprecated/py/desispec/quicklook/quicklook.py diff --git a/py/desispec/quicklook/quicksky.py b/deprecated/py/desispec/quicklook/quicksky.py similarity index 100% rename from py/desispec/quicklook/quicksky.py rename to deprecated/py/desispec/quicklook/quicksky.py diff --git a/py/desispec/scripts/qa_exposure.py b/deprecated/py/desispec/scripts/qa_exposure.py similarity index 100% rename from py/desispec/scripts/qa_exposure.py rename to deprecated/py/desispec/scripts/qa_exposure.py diff --git a/py/desispec/scripts/qa_frame.py b/deprecated/py/desispec/scripts/qa_frame.py similarity index 100% rename from py/desispec/scripts/qa_frame.py rename to deprecated/py/desispec/scripts/qa_frame.py diff --git a/py/desispec/scripts/qa_night.py b/deprecated/py/desispec/scripts/qa_night.py similarity index 100% rename from py/desispec/scripts/qa_night.py rename to deprecated/py/desispec/scripts/qa_night.py diff --git a/py/desispec/scripts/qa_prod.py b/deprecated/py/desispec/scripts/qa_prod.py similarity index 100% rename from py/desispec/scripts/qa_prod.py rename to deprecated/py/desispec/scripts/qa_prod.py diff --git a/py/desispec/scripts/skysubresid.py b/deprecated/py/desispec/scripts/skysubresid.py similarity index 100% rename from py/desispec/scripts/skysubresid.py rename to deprecated/py/desispec/scripts/skysubresid.py diff --git a/py/desispec/test/integration_test.py b/deprecated/py/desispec/test/integration_test.py similarity index 100% rename from py/desispec/test/integration_test.py rename to deprecated/py/desispec/test/integration_test.py diff --git a/py/desispec/test/old_integration_test.py b/deprecated/py/desispec/test/old_integration_test.py similarity index 100% rename from py/desispec/test/old_integration_test.py rename to deprecated/py/desispec/test/old_integration_test.py diff --git a/py/desispec/test/test_qa.py b/deprecated/py/desispec/test/test_qa.py similarity index 100% rename from py/desispec/test/test_qa.py rename to deprecated/py/desispec/test/test_qa.py diff --git a/py/desispec/test/test_ql_pa.py b/deprecated/py/desispec/test/test_ql_pa.py similarity index 100% rename from py/desispec/test/test_ql_pa.py rename to deprecated/py/desispec/test/test_ql_pa.py diff --git a/py/desispec/test/test_ql_qa.py b/deprecated/py/desispec/test/test_ql_qa.py similarity index 100% rename from py/desispec/test/test_ql_qa.py rename to deprecated/py/desispec/test/test_ql_qa.py diff --git 
a/py/desispec/test/test_qlextract.py b/deprecated/py/desispec/test/test_qlextract.py similarity index 100% rename from py/desispec/test/test_qlextract.py rename to deprecated/py/desispec/test/test_qlextract.py diff --git a/py/desispec/io/__init__.py b/py/desispec/io/__init__.py index e6da18194..7ae5a820f 100644 --- a/py/desispec/io/__init__.py +++ b/py/desispec/io/__init__.py @@ -26,15 +26,12 @@ from .xytraceset import read_xytraceset, write_xytraceset from .image import read_image, write_image from .meta import (findfile, get_exposures, get_files, get_raw_files, - rawdata_root, specprod_root, validate_night, qaprod_root, + rawdata_root, specprod_root, validate_night, get_pipe_rundir, get_pipe_scriptdir, get_pipe_database, get_pipe_logdir, get_reduced_frames, get_pipe_pixeldir, get_nights, get_pipe_nightdir, find_exposure_night, shorten_filename, get_readonly_filepath) from .params import read_params -from .qa import (read_qa_frame, read_qa_data, write_qa_frame, write_qa_brick, - load_qa_frame, write_qa_exposure, write_qa_multiexp, load_qa_multiexp, - qafile_from_framefile) from .exposure_tile_qa import (read_exposure_qa, write_exposure_qa, read_tile_qa, write_tile_qa) from .raw import read_raw, write_raw from .sky import read_sky, write_sky diff --git a/py/desispec/io/meta.py b/py/desispec/io/meta.py index c239adbfb..ca7e63621 100755 --- a/py/desispec/io/meta.py +++ b/py/desispec/io/meta.py @@ -133,7 +133,7 @@ def findfile(filetype, night=None, expid=None, camera=None, healpix=None, nside=64, band=None, spectrograph=None, survey=None, faprogram=None, version=None, rawdata_dir=None, specprod_dir=None, specprod=None, - qaprod_dir=None, tiles_dir=None, outdir=None, + tiles_dir=None, outdir=None, download=False, return_exists=False, readonly=False, logfile=False): """Returns location where file should be @@ -160,7 +160,6 @@ def findfile(filetype, night=None, expid=None, camera=None, rawdata_dir : overrides $DESI_SPECTRO_DATA specprod_dir : overrides $DESI_SPECTRO_REDUX/$SPECPROD/ specprod : production name, or full path to production - qaprod_dir : defaults to $DESI_SPECTRO_REDUX/$SPECPROD/QA/ if not provided tiles_dir : defaults to $FIBER_ASSIGN_DIR if not provided download : if not found locally, try to fetch remotely outdir : use this directory for output instead of canonical location @@ -294,29 +293,6 @@ def findfile(filetype, night=None, expid=None, camera=None, zcat_tile = '{specprod_dir}/zcatalog/{version}/ztile-{survey}-{faprogram}-{groupname}.fits', zall_hp = '{specprod_dir}/zcatalog/{version}/zall-pix-{specprod}.fits', zall_tile='{specprod_dir}/zcatalog/{version}/zall-tile{groupname}-{specprod}.fits', - # - # Deprecated QA files below this point. 
- # - qa_data = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-{camera}-{expid:08d}.yaml', - qa_data_exp = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-{expid:08d}.yaml', - qa_bootcalib = '{qaprod_dir}/calib2d/psf/{night}/qa-psfboot-{camera}.pdf', - qa_sky_fig = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-sky-{camera}-{expid:08d}.png', - qa_skychi_fig = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-skychi-{camera}-{expid:08d}.png', - qa_s2n_fig = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-s2n-{camera}-{expid:08d}.png', - qa_flux_fig = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-flux-{camera}-{expid:08d}.png', - qa_toplevel_html = '{qaprod_dir}/qa-toplevel.html', - qa_calib = '{qaprod_dir}/calib2d/{night}/qa-{camera}-{expid:08d}.yaml', - qa_calib_html = '{qaprod_dir}/calib2d/qa-calib2d.html', - qa_calib_exp = '{qaprod_dir}/calib2d/{night}/qa-{expid:08d}.yaml', - qa_calib_exp_html = '{qaprod_dir}/calib2d/{night}/qa-{expid:08d}.html', - qa_exposures_html = '{qaprod_dir}/exposures/qa-exposures.html', - qa_exposure_html = '{qaprod_dir}/exposures/{night}/{expid:08d}/qa-{expid:08d}.html', - qa_flat_fig = '{qaprod_dir}/calib2d/{night}/qa-flat-{camera}-{expid:08d}.png', - qa_ztruth = '{qaprod_dir}/exposures/{night}/qa-ztruth-{night}.yaml', - qa_ztruth_fig = '{qaprod_dir}/exposures/{night}/qa-ztruth-{night}.png', - ql_fig = '{specprod_dir}/exposures/{night}/{expid:08d}/ql-qlfig-{camera}-{expid:08d}.png', - ql_file = '{specprod_dir}/exposures/{night}/{expid:08d}/ql-qlfile-{camera}-{expid:08d}.json', - ql_mergedQA_file = '{specprod_dir}/exposures/{night}/{expid:08d}/ql-mergedQA-{camera}-{expid:08d}.json', ) ## aliases location['desi'] = location['raw'] @@ -434,9 +410,6 @@ def findfile(filetype, night=None, expid=None, camera=None, # but we may need the variable to be set in the meantime specprod_dir = "dummy" - if qaprod_dir is None and 'qaprod_dir' in required_inputs: - qaprod_dir = qaprod_root(specprod_dir=specprod_dir) - if tiles_dir is None and 'tiles_dir' in required_inputs: tiles_dir = os.environ['FIBER_ASSIGN_DIR'] @@ -456,7 +429,7 @@ def findfile(filetype, night=None, expid=None, camera=None, raise ValueError('Camera {} should be b0,r1..z9, or with ?* wildcards'.format(camera)) actual_inputs = { - 'specprod_dir':specprod_dir, 'specprod':specprod, 'qaprod_dir':qaprod_dir, 'tiles_dir':tiles_dir, + 'specprod_dir':specprod_dir, 'specprod':specprod, 'tiles_dir':tiles_dir, 'night':night, 'expid':expid, 'tile':tile, 'camera':camera, 'groupname':groupname, 'subgroup':subgroup, 'version':version, 'healpix':healpix, 'nside':nside, 'hpixdir':hpixdir, 'band':band, @@ -552,7 +525,7 @@ def get_raw_files(filetype, night, expid, rawdata_dir=None): return files -def get_files(filetype, night, expid, specprod_dir=None, qaprod_dir=None, **kwargs): +def get_files(filetype, night, expid, specprod_dir=None, **kwargs): """Get files for a specified exposure. Uses :func:`findfile` to determine the valid file names for the specified @@ -572,8 +545,7 @@ def get_files(filetype, night, expid, specprod_dir=None, qaprod_dir=None, **kwar dict: Dictionary of found file names using camera id strings as keys, which are guaranteed to match the regular expression [brz][0-9]. 
""" - glob_pattern = findfile(filetype, night, expid, camera='*', specprod_dir=specprod_dir, - qaprod_dir=qaprod_dir) + glob_pattern = findfile(filetype, night, expid, camera='*', specprod_dir=specprod_dir) literals = [re.escape(tmp) for tmp in glob_pattern.split('*')] re_pattern = re.compile('([brz][0-9])'.join(literals)) files = { } @@ -838,17 +810,6 @@ def specprod_root(specprod=None, readonly=False): return specprod -def qaprod_root(specprod_dir=None): - """Return directory root for spectro production QA, i.e. - ``$DESI_SPECTRO_REDUX/$SPECPROD/QA``. - - Raises: - KeyError: if these environment variables aren't set. - """ - if specprod_dir is None: - specprod_dir = specprod_root() - return os.path.join(specprod_dir, 'QA') - def faflavor2program(faflavor): """ Map FAFLAVOR keywords to what we wish we had set for FAPRGRM diff --git a/py/desispec/scripts/fiberflat.py b/py/desispec/scripts/fiberflat.py index 3391eba2b..0c14d2c33 100644 --- a/py/desispec/scripts/fiberflat.py +++ b/py/desispec/scripts/fiberflat.py @@ -14,9 +14,6 @@ from desispec.io import write_fiberflat from desispec.fiberflat import compute_fiberflat from desiutil.log import get_logger -from desispec.io.qa import load_qa_frame -from desispec.io import write_qa_frame -from desispec.qa import qa_plots from desispec.cosmics import reject_cosmic_rays_1d import argparse @@ -27,10 +24,6 @@ def parse(options=None): help = 'path of DESI frame fits file corresponding to a continuum lamp exposure') parser.add_argument('-o','--outfile', type = str, default = None, required=True, help = 'path of DESI fiberflat fits file') - parser.add_argument('--qafile', type=str, default=None, required=False, - help='path of QA file') - parser.add_argument('--qafig', type = str, default = None, required=False, - help = 'path of QA figure file') parser.add_argument('--nsig', type = float, default = 10, required=False, help = 'nsigma clipping') parser.add_argument('--acc', type = float, default = 5.e-4, required=False, @@ -61,21 +54,6 @@ def main(args=None) : fiberflat = compute_fiberflat(frame,nsig_clipping=args.nsig,accuracy=args.acc,smoothing_res=args.smoothing_resolution) - # QA - if (args.qafile is not None): - log.info("performing fiberflat QA") - # Load - qaframe = load_qa_frame(args.qafile, frame_meta=frame.meta, flavor=frame.meta['FLAVOR']) - # Run - qaframe.run_qa('FIBERFLAT', (frame, fiberflat)) - # Write - if args.qafile is not None: - write_qa_frame(args.qafile, qaframe) - log.info("successfully wrote {:s}".format(args.qafile)) - # Figure(s) - if args.qafig is not None: - qa_plots.frame_fiberflat(args.qafig, qaframe, frame, fiberflat) - # Write write_fiberflat(args.outfile, fiberflat, frame.meta) log.info("successfully wrote %s"%args.outfile) diff --git a/py/desispec/scripts/sky.py b/py/desispec/scripts/sky.py index 3b65c1530..70654e909 100644 --- a/py/desispec/scripts/sky.py +++ b/py/desispec/scripts/sky.py @@ -9,8 +9,6 @@ from desispec.io import read_frame from desispec.io import read_fiberflat from desispec.io import write_sky -from desispec.io.qa import load_qa_frame -from desispec.io import write_qa_frame from desispec.io import shorten_filename from desispec.io import write_skycorr from desispec.io import read_skycorr_pca @@ -19,7 +17,6 @@ from desispec.skycorr import SkyCorr from desispec.fiberflat import apply_fiberflat from desispec.sky import compute_sky -from desispec.qa import qa_plots from desispec.cosmics import reject_cosmic_rays_1d from desiutil.log import get_logger import argparse @@ -35,10 +32,6 @@ def 
parse(options=None): help = 'path of DESI fiberflat fits file') parser.add_argument('-o','--outfile', type = str, default = None, required=True, help = 'path of DESI sky fits file') - parser.add_argument('--qafile', type = str, default = None, required=False, - help = 'path of QA file. Will calculate for Sky Subtraction') - parser.add_argument('--qafig', type = str, default = None, required=False, - help = 'path of QA figure file') parser.add_argument('--cosmics-nsig', type = float, default = 0, required=False, help = 'n sigma rejection for cosmics in 1D (default, no rejection)') parser.add_argument('--no-extra-variance', action='store_true', @@ -139,21 +132,6 @@ def main(args=None) : write_skycorr(args.save_adjustments,skycorr) log.info("wrote {}".format(args.save_adjustments)) - # QA - if (args.qafile is not None) or (args.qafig is not None): - log.info("performing skysub QA") - # Load - qaframe = load_qa_frame(args.qafile, frame_meta=frame.meta, flavor=frame.meta['FLAVOR']) - # Run - qaframe.run_qa('SKYSUB', (frame, skymodel)) - # Write - if args.qafile is not None: - write_qa_frame(args.qafile, qaframe) - log.info("successfully wrote {:s}".format(args.qafile)) - # Figure(s) - if args.qafig is not None: - qa_plots.frame_skyres(args.qafig, frame, skymodel, qaframe) - # record inputs frame.meta['IN_FRAME'] = shorten_filename(args.infile) frame.meta['FIBERFLT'] = shorten_filename(args.fiberflat) diff --git a/py/desispec/test/test_binscripts.py b/py/desispec/test/test_binscripts.py index 21523358f..41e96b2d4 100644 --- a/py/desispec/test/test_binscripts.py +++ b/py/desispec/test/test_binscripts.py @@ -42,9 +42,6 @@ def setUpClass(cls): cls.modelfile ='stdstar_templates-'+id+'.fits' cls.skyfile = 'sky-'+id+'.fits.gz' cls.stdfile = 'std-'+id+'.fits.gz' - cls.qa_calib_file = 'qa-calib-'+id+'.yaml' - cls.qa_data_file = 'qa-data-'+id+'.yaml' - cls.qafig = 'qa-'+id+'.pdf' #- when running "python setup.py test", this file is run from different #- locations for python 2.7 vs. 
3.5 @@ -227,11 +224,9 @@ def test_compute_fiberflat(self): self._write_frame(flavor='flat') self._write_fibermap() - # QA fig requires fibermapfile - cmd = '{} {}/desi_compute_fiberflat --infile {} --outfile {} --qafile {} --qafig {}'.format( - sys.executable, self.binDir, self.framefile, - self.fiberflatfile, self.qa_calib_file, self.qafig) - outputs = [self.fiberflatfile,self.qa_calib_file,self.qafig] + cmd = '{} {}/desi_compute_fiberflat --infile {} --outfile {}'.format( + sys.executable, self.binDir, self.framefile, self.fiberflatfile) + outputs = [self.fiberflatfile,] inputs = [self.framefile,] result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=True) self.assertTrue(success, 'FAILED: {}'.format(cmd)) @@ -325,11 +320,11 @@ def test_compute_fluxcalib(self): self._write_skymodel() self._write_stdstars() - cmd = "{} {}/desi_compute_fluxcalibration --infile {} --fiberflat {} --sky {} --models {} --outfile {} --qafile {} --qafig {} --min-color 0.".format( + cmd = "{} {}/desi_compute_fluxcalibration --infile {} --fiberflat {} --sky {} --models {} --outfile {} --min-color 0.".format( sys.executable, self.binDir, self.framefile, self.fiberflatfile, self.skyfile, self.stdfile, - self.calibfile, self.qa_data_file, self.qafig) + self.calibfile) inputs = [self.framefile, self.fiberflatfile, self.skyfile, self.stdfile] - outputs = [self.calibfile,self.qa_data_file,self.qafig,] + outputs = [self.calibfile,] result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=True) self.assertTrue(success, 'FAILED: {}'.format(cmd)) @@ -349,10 +344,10 @@ def test_compute_sky(self): self._write_fiberflat() self._write_fibermap() - cmd = "{} {}/desi_compute_sky --infile {} --fiberflat {} --outfile {} --qafile {} --qafig {}".format( - sys.executable, self.binDir, self.framefile, self.fiberflatfile, self.skyfile, self.qa_data_file, self.qafig) + cmd = "{} {}/desi_compute_sky --infile {} --fiberflat {} --outfile {}".format( + sys.executable, self.binDir, self.framefile, self.fiberflatfile, self.skyfile) inputs = [self.framefile, self.fiberflatfile] - outputs = [self.skyfile,self.qa_data_file,self.qafig,] + outputs = [self.skyfile,] result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=True) self.assertEqual(result, 0, 'FAILED: {}'.format(cmd)) self.assertTrue(success, 'FAILED: {}'.format(cmd)) diff --git a/py/desispec/test/test_bootcalib.py b/py/desispec/test/test_bootcalib.py index 699727b7c..21607edf5 100644 --- a/py/desispec/test/test_bootcalib.py +++ b/py/desispec/test/test_bootcalib.py @@ -196,30 +196,22 @@ def test_main(self): bootscript.main(args) #- Ensure the PSF class can read that file - from desispec.quicklook.qlpsf import PSF - psf = PSF(self.testout) + # from desispec.quicklook.qlpsf import PSF + # psf = PSF(self.testout) + + #- Ensure the output format is usable by xytraceset + from desispec.io import read_xytraceset + psf = read_xytraceset(self.testout) #- While we're at it, test some PSF accessor functions indices = np.array([0,1]) - waves = np.array([psf.wmin, psf.wmin+1]) - - w = psf.wavelength() - w = psf.wavelength(ispec=0) - w = psf.wavelength(ispec=indices) - w = psf.wavelength(ispec=indices, y=0) - w = psf.wavelength(ispec=indices, y=indices) - - x = psf.x() - x = psf.x(ispec=0) - x = psf.x(ispec=indices) - x = psf.x(ispec=None, wavelength=psf.wmin) - x = psf.x(ispec=1, wavelength=psf.wmin) - x = psf.x(ispec=indices, wavelength=psf.wmin) - x = psf.x(ispec=indices, wavelength=waves) - - y = psf.y(ispec=None, wavelength=psf.wmin) - y =
psf.y(ispec=0, wavelength=psf.wmin) - y = psf.y(ispec=indices, wavelength=psf.wmin) - y = psf.y(ispec=indices, wavelength=waves) + waves = np.array([psf.wavemin, psf.wavemin+1]) + allrows = np.arange(psf.npix_y) + + w = psf.wave_vs_y(fiber=0, y=indices) + w = psf.wave_vs_y(fiber=1, y=allrows) + + x = psf.x_vs_wave(0, waves) + y = psf.y_vs_wave(0, waves) diff --git a/py/desispec/test/test_io.py b/py/desispec/test/test_io.py index 7656e7c62..4bbf383b7 100644 --- a/py/desispec/test/test_io.py +++ b/py/desispec/test/test_io.py @@ -731,29 +731,6 @@ def test_image_rw(self): for key in meta: self.assertEqual(meta[key], img3.meta[key], 'meta[{}] not propagated'.format(key)) - def test_io_qa_frame(self): - """Test reading and writing QA_Frame. - """ - from ..qa import QA_Frame - from ..io.qa import read_qa_frame, write_qa_frame - nspec = 3 - nwave = 10 - wave = np.arange(nwave) - flux = np.random.uniform(size=(nspec, nwave)) - ivar = np.ones(flux.shape) - frame = Frame(wave, flux, ivar, spectrograph=0) - frame.meta = dict(CAMERA='b0', FLAVOR='science', NIGHT='20160607', EXPID=1) - #- Init - qaframe = QA_Frame(frame) - qaframe.init_skysub() - # Write - write_qa_frame(self.testyfile, qaframe) - # Read - xqaframe = read_qa_frame(self.testyfile) - # Check - self.assertTrue(qaframe.qa_data['SKYSUB']['PARAMS']['PCHI_RESID'] == xqaframe.qa_data['SKYSUB']['PARAMS']['PCHI_RESID']) - self.assertTrue(qaframe.flavor == xqaframe.flavor) - def test_native_endian(self): """Test desiutil.io.util.native_endian. """ From 0f90d63b3baec6ffd7eb828bc3a847bd5332691a Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 16 Oct 2024 20:36:53 -0700 Subject: [PATCH 02/27] move no-op test_ql --- {py => deprecated/py}/desispec/test/test_ql.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {py => deprecated/py}/desispec/test/test_ql.py (100%) diff --git a/py/desispec/test/test_ql.py b/deprecated/py/desispec/test/test_ql.py similarity index 100% rename from py/desispec/test/test_ql.py rename to deprecated/py/desispec/test/test_ql.py From 6686983be10166896272e39b821adfccc3a0a85e Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 16 Oct 2024 22:06:37 -0700 Subject: [PATCH 03/27] xytraceset slicing and unit tests --- py/desispec/test/test_xytraceset.py | 175 ++++++++++++++++++++++++++++ py/desispec/xytraceset.py | 25 ++++ 2 files changed, 200 insertions(+) create mode 100644 py/desispec/test/test_xytraceset.py diff --git a/py/desispec/test/test_xytraceset.py b/py/desispec/test/test_xytraceset.py new file mode 100644 index 000000000..4f70e6f1c --- /dev/null +++ b/py/desispec/test/test_xytraceset.py @@ -0,0 +1,175 @@ +""" +tests desispec.xytraceset +""" + +import unittest +import importlib.resources + +import numpy as np +from numpy.polynomial.legendre import legval + +from desispec.xytraceset import XYTraceSet, get_badamp_fibers +from desispec.io import read_xytraceset + +class TestXYTraceSet(unittest.TestCase): + + def setUp(self): + self.nspec = 10 + self.ncoef = 5 + self.wavemin = 5000 + self.wavemax = 5100 + self.npix_y = 200 + self.yy = np.arange(self.npix_y) + + self.waves = np.linspace(self.wavemin, self.wavemax) + self.nwave = len(self.waves) + + # rescale waves -> (-1,1) range + self.xw = 2*(self.waves-self.wavemin)/(self.wavemax-self.wavemin) - 1 + + + def tearDown(self): + pass + + def test_minimal_xytraceset(self): + xcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + ycoef = np.random.uniform(size=(self.nspec, self.ncoef)) + + xy = XYTraceSet(xcoef, ycoef, self.wavemin, self.wavemax, self.npix_y) + +
x = xy.x_vs_wave(0, self.waves) + self.assertEqual(len(x), self.nwave) + self.assertTrue(np.allclose(x, legval(self.xw, xcoef[0]))) + + y = xy.y_vs_wave(5, self.waves) + self.assertEqual(len(y), self.nwave) + self.assertTrue(np.allclose(y, legval(self.xw, ycoef[5]))) + + w = xy.wave_vs_y(2, self.yy) + self.assertEqual(len(w), self.npix_y) + + x = xy.x_vs_y(3, self.yy) + self.assertEqual(len(x), self.npix_y) + + self.assertEqual(xy.npix_y, self.npix_y) + self.assertEqual(xy.nspec, self.nspec) + + #- no xsigcoef/ysigcoef -> errors + with self.assertRaises(RuntimeError): + xsig = xy.xsig_vs_wave(0, self.waves) + + with self.assertRaises(RuntimeError): + ysig = xy.ysig_vs_wave(0, self.waves) + + def test_xysig(self): + xcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + ycoef = np.random.uniform(size=(self.nspec, self.ncoef)) + xsigcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + ysigcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + meta = dict(blat=1, foo=2) + + xy = XYTraceSet(xcoef, ycoef, self.wavemin, self.wavemax, self.npix_y, + xsigcoef=xsigcoef, ysigcoef=ysigcoef, meta=meta) + + xsig = xy.xsig_vs_wave(0, self.waves) + self.assertEqual(len(xsig), self.nwave) + self.assertTrue(np.allclose(xsig, legval(self.xw, xsigcoef[0]))) + + ysig = xy.ysig_vs_wave(5, self.waves) + self.assertEqual(len(ysig), self.nwave) + self.assertTrue(np.allclose(ysig, legval(self.xw, ysigcoef[5]))) + + self.assertEqual(xy.meta['blat'], 1) + self.assertEqual(xy.meta['foo'], 2) + + def test_slicing(self): + xcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + ycoef = np.random.uniform(size=(self.nspec, self.ncoef)) + xsigcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + ysigcoef = np.random.uniform(size=(self.nspec, self.ncoef)) + meta = dict(blat=1, foo=2) + + #- first test without xsig, ysig + xy = XYTraceSet(xcoef, ycoef, self.wavemin, self.wavemax, self.npix_y) + + xy2 = xy[1:3] + + # xy2 fiber 0 is original fiber 1 + x = xy2.x_vs_wave(0, self.waves) + self.assertEqual(len(x), self.nwave) + self.assertTrue(np.allclose(x, legval(self.xw, xcoef[1]))) + + y = xy2.y_vs_wave(1, self.waves) + self.assertEqual(len(y), self.nwave) + self.assertTrue(np.allclose(y, legval(self.xw, ycoef[2]))) + + #- now test with xsig, ysig + xy = XYTraceSet(xcoef, ycoef, self.wavemin, self.wavemax, self.npix_y, + xsigcoef=xsigcoef, ysigcoef=ysigcoef) + + xy2 = xy[1:3] + + # xy2 fiber 0 is original fiber 1 + x = xy2.x_vs_wave(0, self.waves) + self.assertEqual(len(x), self.nwave) + self.assertTrue(np.allclose(x, legval(self.xw, xcoef[1]))) + + y = xy2.y_vs_wave(1, self.waves) + self.assertEqual(len(y), self.nwave) + self.assertTrue(np.allclose(y, legval(self.xw, ycoef[2]))) + + xsig = xy2.xsig_vs_wave(0, self.waves) + self.assertEqual(len(xsig), self.nwave) + self.assertTrue(np.allclose(xsig, legval(self.xw, xsigcoef[1]))) + + ysig = xy2.ysig_vs_wave(1, self.waves) + self.assertEqual(len(ysig), self.nwave) + self.assertTrue(np.allclose(ysig, legval(self.xw, ysigcoef[2]))) + + xsig = xy2.xsig_vs_y(0, self.yy) + self.assertEqual(len(xsig), self.npix_y) + + ysig = xy2.ysig_vs_y(1, self.yy) + self.assertEqual(len(ysig), self.npix_y) + + def test_badamp(self): + psffile = importlib.resources.files('desispec').joinpath('test/data/ql/psf-r0.fits') + xy = read_xytraceset(psffile) + + header = dict() + fibers = get_badamp_fibers(header, xy) + self.assertEqual(len(fibers), 0) + + header['CCDSECA'] = '[1:2057, 1:2064]' + header['CCDSECB'] = '[2058:4114, 1:2064]' + header['CCDSECC'] = '[1:2057, 2065:4128]' +
header['CCDSECD'] = '[2058:4114, 2065:4128]' + + header['BADAMPS'] = 'A' + fibers = get_badamp_fibers(header, xy, verbose=True) + self.assertEqual(len(fibers), 247) + self.assertEqual(np.max(fibers), 246) + + header['BADAMPS'] = 'C' + fibers = get_badamp_fibers(header, xy, verbose=False) + self.assertEqual(len(fibers), 247) + self.assertEqual(np.max(fibers), 246) + + header['BADAMPS'] = 'B' + fibers = get_badamp_fibers(header, xy) + self.assertEqual(len(fibers), 253) + self.assertEqual(np.min(fibers), 247) + + header['BADAMPS'] = 'D' + fibers = get_badamp_fibers(header, xy) + self.assertEqual(len(fibers), 253) + self.assertEqual(np.min(fibers), 247) + + header['BADAMPS'] = 'A,B' + fibers = get_badamp_fibers(header, xy) + self.assertEqual(len(fibers), 500) + + header['BADAMPS'] = 'CB' + fibers = get_badamp_fibers(header, xy) + self.assertEqual(len(fibers), 500) + diff --git a/py/desispec/xytraceset.py b/py/desispec/xytraceset.py index 2e4d18a31..38093d3a7 100644 --- a/py/desispec/xytraceset.py +++ b/py/desispec/xytraceset.py @@ -34,6 +34,11 @@ def __init__(self, xcoef, ycoef, wavemin, wavemax, npix_y, xsigcoef = None, ysig self.wavemax = wavemax self.npix_y = npix_y + self._xcoef = xcoef + self._ycoef = ycoef + self._xsigcoef = xsigcoef + self._ysigcoef = ysigcoef + self.x_vs_wave_traceset = TraceSet(xcoef,[wavemin,wavemax]) self.y_vs_wave_traceset = TraceSet(ycoef,[wavemin,wavemax]) @@ -79,6 +84,26 @@ def xsig_vs_y(self,fiber,y) : def ysig_vs_y(self,fiber,y) : return self.ysig_vs_wave(fiber,self.wave_vs_y(fiber,y)) + + def __getitem__(self, ii): + xcoef = self._xcoef[ii] + ycoef = self._ycoef[ii] + + if self._xsigcoef is not None: + xsigcoef = self._xsigcoef[ii] + else: + xsigcoef = None + + if self._ysigcoef is not None: + ysigcoef = self._ysigcoef[ii] + else: + ysigcoef = None + + return XYTraceSet(xcoef=xcoef, ycoef=ycoef, + wavemin=self.wavemin, wavemax=self.wavemax, + npix_y=self.npix_y, + xsigcoef=xsigcoef, ysigcoef=ysigcoef, + meta=self.meta) """ if self.x_vs_y_traceset is None : From adadf0d510bd7ac970f01044774c8e63caf73cdd Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 11:41:18 -0700 Subject: [PATCH 04/27] example test as basis for other tests --- py/desispec/test/test_example.py | 43 ++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 py/desispec/test/test_example.py diff --git a/py/desispec/test/test_example.py b/py/desispec/test/test_example.py new file mode 100644 index 000000000..91b23ebe5 --- /dev/null +++ b/py/desispec/test/test_example.py @@ -0,0 +1,43 @@ +""" +Example test that can be used as a starting point for other tests + +run these with pytest py/desispec/test/test_example.py +""" + +import unittest + +class TestExample(unittest.TestCase): + + # setUpClass runs once at the start before any tests + @classmethod + def setUpClass(cls): + cls.blat = 1 + + # setUpClass runs once at the end after every test + # e.g. to remove files created by setUpClass + @classmethod + def setUpClass(cls): + cls.blat = 1 + + # setUp runs before every test, e.g. to reset state + def setUp(self): + self.foo = 2 + + # setUp runs after every test, e.g. 
to reset state + def tearDown(self): + pass + + def test_blat(self): + self.assertEqual(self.blat, 1) + + def test_foo(self): + self.assertEqual(self.foo, 2) + self.foo *= 2 + self.assertEqual(self.foo, 4) + + def test_foo_again(self): + #- even though test_foo changed self.foo, self.setUp() should reset it + self.assertEqual(self.foo, 2) + self.foo *= 2 + self.assertEqual(self.foo, 4) + From 95ed6a3a9565329ccace54474922327b8bdded52 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 14:18:24 -0700 Subject: [PATCH 05/27] badcolumn tests --- py/desispec/test/test_badcolumn.py | 127 +++++++++++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 py/desispec/test/test_badcolumn.py diff --git a/py/desispec/test/test_badcolumn.py b/py/desispec/test/test_badcolumn.py new file mode 100644 index 000000000..b6d68b907 --- /dev/null +++ b/py/desispec/test/test_badcolumn.py @@ -0,0 +1,127 @@ +""" +Test desispec.badcolumn +""" + +import unittest +import importlib.resources + +import numpy as np +from astropy.table import Table + +from desispec.frame import Frame +from desispec.io import read_xytraceset + +class TestBadColumn(unittest.TestCase): + + # setUpClass runs once at the start before any tests + @classmethod + def setUpClass(cls): + pass + + # setUpClass runs once at the end after every test + # e.g. to remove files created by setUpClass + @classmethod + def setUpClass(cls): + pass + + # setUp runs before every test, e.g. to reset state + def setUp(self): + pass + + # setUp runs after every test, e.g. to reset state + def tearDown(self): + pass + + def test_flux_bias_function(self): + from desispec.badcolumn import flux_bias_function + + # scalar + bias0 = flux_bias_function(0.0) + bias1 = flux_bias_function(1) + bias10 = flux_bias_function(10) + self.assertTrue(np.isscalar(bias0)) + self.assertTrue(np.isscalar(bias1)) + self.assertLess(bias1, bias0) + self.assertEqual(bias10, 0) + + # vector + bias = flux_bias_function([0.0, 1, 10]) + self.assertEqual(bias[0], bias0) + self.assertEqual(bias[1], bias1) + self.assertEqual(bias[2], bias10) + + + def test_compute_badcolumn_mask(self): + from desispec.badcolumn import ( + compute_badcolumn_specmask, compute_badcolumn_fibermask, add_badcolumn_mask) + + #- Read a PSF and trim to just one bundle for faster testing + psffile = importlib.resources.files('desispec').joinpath('test/data/ql/psf-r0.fits') + xy = read_xytraceset(psffile) + nspec = 25 + xy = xy[0:nspec] + + ny = xy.npix_y + wave = np.arange(xy.wavemin, xy.wavemax) + nwave = len(wave) + minx = int(np.min(xy.x_vs_wave(0, wave))) + nx = int(np.max(xy.x_vs_wave(xy.nspec-1, wave)) + minx) + + flux = np.zeros( (nspec, nwave) ) + ivar = np.ones( (nspec, nwave) ) + fibermap = Table() + fibermap['FIBER'] = np.arange(nspec) + fibermap['FIBERSTATUS'] = np.zeros(nspec, dtype=np.uint32) + frame = Frame(wave, flux, ivar, fibermap=fibermap, meta=dict(CAMERA='r0')) + + badcol = nx//2 + badcol_table = Table() + badcol_table['COLUMN'] = [badcol,] + badcol_table['ELEC_PER_SEC'] = [1.,] + + specmask = compute_badcolumn_specmask(frame, xy, badcol_table) + self.assertEqual(specmask.shape, (nspec,nwave)) + + impacted_fibers = np.where(np.any(specmask, axis=1))[0] + for i in impacted_fibers: + #- flagged trace comes within 3 columns of the bad column + dx = xy.x_vs_wave(i, wave) - badcol + self.assertLess(np.min(np.abs(dx)), 3) + + #- masking at the fiber level requires a certain fraction of wavelengths to be masked + fibermask = compute_badcolumn_fibermask(specmask,
camera_arm='r', threshold_specfrac=0.5) + masked_fibers = np.where(fibermask != 0)[0] + for i in range(nspec): + frac_masked = np.sum(specmask[i]>0) / nwave + if frac_masked >= 0.5: + self.assertIn(i, masked_fibers) + else: + self.assertNotIn(i, masked_fibers) + + #- upper-case ok + fibermask = compute_badcolumn_fibermask(specmask, camera_arm='R', threshold_specfrac=0.5) + + #- but camera must be b,r,z,B,R,Z + with self.assertRaises(ValueError): + fibermask = compute_badcolumn_fibermask(specmask, camera_arm='Q') + + #- Directly update frame mask + self.assertTrue(np.all(frame.fibermap['FIBERSTATUS'] == 0)) + self.assertTrue(np.all(frame.mask == 0)) + add_badcolumn_mask(frame, xy, badcol_table) + self.assertTrue(np.all(frame.mask == specmask)) + self.assertTrue(np.all(frame.fibermap['FIBERSTATUS'] == fibermask)) + + #- Set a mask if it isn't already there + frame.mask = None + add_badcolumn_mask(frame, xy, badcol_table) + self.assertTrue(np.all(frame.mask == specmask)) + + #- len-0 badcol_table ok + frame.mask *= 0 + frame.fibermap['FIBERSTATUS'] *= 0 + add_badcolumn_mask(frame, xy, badcol_table[0:0]) + self.assertTrue(np.all(frame.mask == 0)) + self.assertTrue(np.all(frame.fibermap['FIBERSTATUS'] == 0)) + + From 5963e9f47485227ecddd9f874b653487f0a35dd2 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 15:09:28 -0700 Subject: [PATCH 06/27] correct tearDownClass, simplify --- py/desispec/test/test_example.py | 29 +++++++++-------------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/py/desispec/test/test_example.py b/py/desispec/test/test_example.py index 91b23ebe5..80323fe4b 100644 --- a/py/desispec/test/test_example.py +++ b/py/desispec/test/test_example.py @@ -11,33 +11,22 @@ class TestExample(unittest.TestCase): # setUpClass runs once at the start before any tests @classmethod def setUpClass(cls): - cls.blat = 1 + pass - # setUpClass runs once at the end after every test - # e.g. to remove files created by setUpClass + # tearDownClass runs once at the end after all tests @classmethod - def setUpClass(cls): - cls.blat = 1 + def tearDownClass(cls): + pass - # setUp runs before every test, e.g. to reset state + # setUp runs before every test def setUp(self): - self.foo = 2 + pass - # setUp runs after every test, e.g. to reset state + # tearDown runs after every test def tearDown(self): pass def test_blat(self): - self.assertEqual(self.blat, 1) - - def test_foo(self): - self.assertEqual(self.foo, 2) - self.foo *= 2 - self.assertEqual(self.foo, 4) - - def test_foo_again(self): - #- even though test_foo changed self.foo, self.setUp() should reset it - self.assertEqual(self.foo, 2) - self.foo *= 2 - self.assertEqual(self.foo, 4) + self.assertEqual(1, 1) + From a0bdab1e72407fb89c4572117b56708786fafbd8 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 15:10:32 -0700 Subject: [PATCH 07/27] remove unused test setup boilerplate --- py/desispec/test/test_badcolumn.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/py/desispec/test/test_badcolumn.py b/py/desispec/test/test_badcolumn.py index b6d68b907..f17352946 100644 --- a/py/desispec/test/test_badcolumn.py +++ b/py/desispec/test/test_badcolumn.py @@ -13,25 +13,6 @@ class TestBadColumn(unittest.TestCase): - # setUpClass runs once at the start before any tests - @classmethod - def setUpClass(cls): - pass - - # setUpClass runs once at the end after every test - e.g.
to remove files created by setUpClass - @classmethod - def setUpClass(cls): - pass - - # setUp runs before every test, e.g. to reset state - def setUp(self): - pass - - # setUp runs after every test, e.g. to reset state - def tearDown(self): - pass - def test_flux_bias_function(self): from desispec.badcolumn import flux_bias_function From a110c87617df5c6bf551ce6c7ea0f80f11688b7f Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 15:48:10 -0700 Subject: [PATCH 08/27] compute_efftime return numpy arrays, not astropy Columns --- py/desispec/efftime.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/py/desispec/efftime.py b/py/desispec/efftime.py index 76fec89c9..f899a834e 100644 --- a/py/desispec/efftime.py +++ b/py/desispec/efftime.py @@ -76,19 +76,20 @@ def compute_efftime(table, 1.0 + sky_rdn / sky_nom + fflux_backup / sky_nom ) # AR effective exposure time - efftime_dark = ( + # SB cast astropy Column to numpy array + efftime_dark = np.asarray( exptime * (fiberfac_elg / airfac) ** 2 * (sky_nom / effsky_dark) / ebvfac ** 2 ) - efftime_bright = ( + efftime_bright = np.asarray( exptime * (fiberfac_bgs / airfac) ** 2 * (sky_nom / effsky_bright) / ebvfac ** 2 ) - efftime_backup = ( + efftime_backup = np.asarray( exptime * (fiberfac_psf / airfac) ** 2 * (sky_nom / effsky_backup) From 861b4768b559a1338f53bbeb53a154abcb252081 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 15:48:26 -0700 Subject: [PATCH 09/27] add compute_efftime tests --- py/desispec/test/test_efftime.py | 55 ++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 py/desispec/test/test_efftime.py diff --git a/py/desispec/test/test_efftime.py b/py/desispec/test/test_efftime.py new file mode 100644 index 000000000..aea2266a6 --- /dev/null +++ b/py/desispec/test/test_efftime.py @@ -0,0 +1,55 @@ +""" +test desispec.efftime +""" + +import unittest +import numpy as np +from astropy.table import Table + +from desispec.efftime import compute_efftime + +class TestEffTime(unittest.TestCase): + + # setUpClass runs once at the start before any tests + @classmethod + def setUpClass(cls): + #- nominal values from https://desi.lbl.gov/trac/wiki/SurveyOps/SurveySpeed + t = Table() + n = 1 + exptime = 1000 + t['EXPTIME'] = np.ones(n) * exptime + t['SKY_MAG_R_SPEC'] = 21.0707 + t['EBV'] = 0.0 + t['TRANSPARENCY_GFA'] = 1.0 + t['AIRMASS'] = 1.0 + t['FIBERFAC_GFA'] = 1.0 + t['FIBERFAC_ELG_GFA'] = 1.0 + t['FIBERFAC_BGS_GFA'] = 1.0 + t['FIBER_FRACFLUX_GFA'] = 0.582 + t['FIBER_FRACFLUX_ELG_GFA'] = 0.424 + t['FIBER_FRACFLUX_BGS_GFA'] = 0.195 + + cls.reference = t + + def test_efftime(self): + t = self.reference.copy() + exptime = t['EXPTIME'][0] + + #- reference values have some rounding, so only compare to 1e-4 + efftime_dark, efftime_bright, efftime_backup = compute_efftime(t) + self.assertAlmostEqual(efftime_dark[0]/exptime, 1, places=4) + self.assertAlmostEqual(efftime_bright[0]/exptime, 1, places=4) + self.assertAlmostEqual(efftime_backup[0]/exptime, 1, places=4) + + #- half the transparency = half the signal but the same background + #- efftime is 1/4 if S/N = S/sqrt(B) + t['TRANSPARENCY_GFA'] = 0.5 + t['FIBERFAC_GFA'] = 0.5 + t['FIBERFAC_ELG_GFA'] = 0.5 + t['FIBERFAC_BGS_GFA'] = 0.5 + efftime_dark, efftime_bright, efftime_backup = compute_efftime(t) + self.assertAlmostEqual(efftime_dark[0]/exptime, 0.25, places=4) + self.assertAlmostEqual(efftime_bright[0]/exptime, 0.25, places=4) + self.assertAlmostEqual(efftime_backup[0]/exptime, 0.25, places=4) + + 
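A minimal sketch (editorial, not part of any patch here) of why PATCH 08 casts the efftime results through np.asarray: arithmetic on astropy Table columns returns astropy Column objects rather than plain numpy arrays, which can surprise numpy-only downstream code. The table and column below are illustrative only.

import numpy as np
from astropy.table import Table

t = Table()
t['EXPTIME'] = np.ones(3) * 1000.0        # stored as an astropy Column
scaled = t['EXPTIME'] * 0.5               # arithmetic keeps the Column type
print(type(scaled).__name__)              # Column
print(type(np.asarray(scaled)).__name__)  # ndarray, what callers expect

The quarter-time assertion in test_efftime above follows from the scaling its comments state: with S/N = S/sqrt(B), halving the signal at fixed background halves S/N, and effective exposure time scales as (S/N)**2, hence exptime/4.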
From 067e4e7bf6c5c751402b73fb3d7b15361749b67d Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 16:22:22 -0700 Subject: [PATCH 10/27] avoid circular import image_model:io:preproc:image_model --- py/desispec/preproc.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/py/desispec/preproc.py b/py/desispec/preproc.py index e396e9d9b..412d4ca55 100644 --- a/py/desispec/preproc.py +++ b/py/desispec/preproc.py @@ -28,7 +28,6 @@ from desispec.io import read_fiberflat, shorten_filename, findfile from desispec.io.util import addkeys from desispec.maskedmedian import masked_median -from desispec.image_model import compute_image_model from desispec.util import header2night def get_amp_ids(header): @@ -1434,6 +1433,9 @@ def preproc(rawimage, header, primary_header, bias=True, dark=True, pixflat=True if model_variance : + #- deferred import to avoid circular import + from desispec.image_model import compute_image_model + psf = None if psf_filename is None : psf_filename = cfinder.findfile("PSF") From f13310bed6492271b48a929645fbb0982289f1c7 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 16:36:59 -0700 Subject: [PATCH 11/27] set image.meta['CAMERA'] --- py/desispec/image.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/py/desispec/image.py b/py/desispec/image.py index 8bea670e3..8ebd4ab47 100644 --- a/py/desispec/image.py +++ b/py/desispec/image.py @@ -8,6 +8,7 @@ import numpy as np from desispec.maskbits import ccdmask from desispec import util +from desiutil.log import get_logger class Image(object): def __init__(self, pix, ivar, mask=None, readnoise=0.0, camera='unknown', @@ -25,6 +26,7 @@ def __init__(self, pix, ivar, mask=None, readnoise=0.0, camera='unknown', camera : e.g. 'b0', 'r1', 'z9' meta : dict-like metadata key/values, e.g. 
from FITS header """ + log = get_logger() if pix.ndim != 2: raise ValueError('pix must be 2D, not {}D'.format(pix.ndim)) if pix.shape != ivar.shape: @@ -45,6 +47,16 @@ def __init__(self, pix, ivar, mask=None, readnoise=0.0, camera='unknown', self.readnoise = readnoise self.camera = camera + #- set meta['CAMERA'] if camera is known + if self.meta is None: + self.meta = dict(CAMERA=camera) + elif 'CAMERA' not in self.meta: + self.meta['CAMERA'] = camera + elif (camera != 'unknown') and (self.meta['CAMERA'] != camera): + log.warning(f"Overriding {meta['CAMERA']=} with {camera=}") + self.meta['CAMERA'] = camera + + #- Allow image slicing def __getitem__(self, xyslice): From c5cfa86b6e0e9a68aaccd8a390b3d84d50ce3b5e Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Thu, 17 Oct 2024 16:37:31 -0700 Subject: [PATCH 12/27] add test_image_model --- py/desispec/test/test_image_model.py | 62 ++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 py/desispec/test/test_image_model.py diff --git a/py/desispec/test/test_image_model.py b/py/desispec/test/test_image_model.py new file mode 100644 index 000000000..6042c1203 --- /dev/null +++ b/py/desispec/test/test_image_model.py @@ -0,0 +1,62 @@ +""" +test desispec.image_model +""" + +import unittest +import importlib.resources + +import numpy as np + +from desispec.image_model import compute_image_model +from desispec.io import read_xytraceset +from desispec.image import Image +from desispec.fiberflat import FiberFlat + +class TestImageModel(unittest.TestCase): + + # setUpClass runs once at the start before any tests + @classmethod + def setUpClass(cls): + + #- Read PSF and trim to just one bundle + cls.camera = camera = 'r0' + psffile = importlib.resources.files('desispec').joinpath(f'test/data/ql/psf-{camera}.fits') + xy = read_xytraceset(psffile) + nspec = 25 + xy = xy[0:nspec] + + wave = np.linspace(xy.wavemin, xy.wavemax) + nwave = len(wave) + xmin = int(np.min(xy.x_vs_wave(0, wave))) + cls.nx = int(np.max(xy.x_vs_wave(xy.nspec-1, wave) + xmin)) + cls.ny = xy.npix_y + cls.xy = xy + + ff = np.ones((nspec, nwave)) + ffivar = np.ones((nspec, nwave)) + cls.fiberflat = FiberFlat(wave, ff, ffivar) + + # tearDownClass runs once at the end after every test + @classmethod + def tearDownClass(cls): + pass + + # setUp runs before every test + def setUp(self): + pass + + # tearDown runs after every test + def tearDown(self): + pass + + def test_image_model(self): + pix = np.random.normal(size=(self.ny, self.nx)) + ivar = np.ones((self.ny, self.nx)) + img = Image(pix, ivar, readnoise=1.0, camera=self.camera) + + model = compute_image_model(img, self.xy, fiberflat=self.fiberflat) + + + + + From 7c33e089b702612a85f62f3d42b0e253f6baa63a Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Fri, 18 Oct 2024 15:27:49 -0700 Subject: [PATCH 13/27] tweak test_efftime --- py/desispec/test/test_efftime.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/py/desispec/test/test_efftime.py b/py/desispec/test/test_efftime.py index aea2266a6..c5617f628 100644 --- a/py/desispec/test/test_efftime.py +++ b/py/desispec/test/test_efftime.py @@ -18,7 +18,8 @@ def setUpClass(cls): n = 1 exptime = 1000 t['EXPTIME'] = np.ones(n) * exptime - t['SKY_MAG_R_SPEC'] = 21.0707 + sky_nom = 3.73 # nMgy/arcsec**2 + t['SKY_MAG_R_SPEC'] = 22.5 - 2.5*np.log10(sky_nom) # 21.07072792047828 t['EBV'] = 0.0 t['TRANSPARENCY_GFA'] = 1.0 t['AIRMASS'] = 1.0 @@ -37,9 +38,9 @@ def test_efftime(self): t = self.reference.copy() exptime = t['EXPTIME'][0] #- reference values have some rounding, so only compare to
1e-4 efftime_dark, efftime_bright, efftime_backup = compute_efftime(t) - self.assertAlmostEqual(efftime_dark[0]/exptime, 1, places=4) - self.assertAlmostEqual(efftime_bright[0]/exptime, 1, places=4) - self.assertAlmostEqual(efftime_backup[0]/exptime, 1, places=4) + self.assertAlmostEqual(efftime_dark[0], exptime) + self.assertAlmostEqual(efftime_bright[0], exptime) + self.assertAlmostEqual(efftime_backup[0], exptime) #- half the transparency = half the signal but the same background #- efftime is 1/4 if S/N = S/sqrt(B) t['TRANSPARENCY_GFA'] = 0.5 t['FIBERFAC_GFA'] = 0.5 t['FIBERFAC_ELG_GFA'] = 0.5 t['FIBERFAC_BGS_GFA'] = 0.5 efftime_dark, efftime_bright, efftime_backup = compute_efftime(t) - self.assertAlmostEqual(efftime_dark[0]/exptime, 0.25, places=4) - self.assertAlmostEqual(efftime_bright[0]/exptime, 0.25, places=4) - self.assertAlmostEqual(efftime_backup[0]/exptime, 0.25, places=4) + self.assertAlmostEqual(efftime_dark[0], exptime/4) + self.assertAlmostEqual(efftime_bright[0], exptime/4) + self.assertAlmostEqual(efftime_backup[0], exptime/4) From 0027f14d1b8d82801319e51688b52afb43b6dda4 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Fri, 18 Oct 2024 15:28:06 -0700 Subject: [PATCH 14/27] add desispec.workflow.timing tests --- py/desispec/test/test_workflow_timing.py | 56 ++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 py/desispec/test/test_workflow_timing.py diff --git a/py/desispec/test/test_workflow_timing.py b/py/desispec/test/test_workflow_timing.py new file mode 100644 index 000000000..0e3dda19b --- /dev/null +++ b/py/desispec/test/test_workflow_timing.py @@ -0,0 +1,56 @@ +""" +Test desispec.workflow.timing + +These are primarily "does it run" tests to catch API changes, +i.e. not "are they correct?" tests. +""" + +import os, re +import unittest + +from desispec.workflow import timing + +class TestWorkflowTiming(unittest.TestCase): + + @classmethod + def setUpClass(cls): + cls.origTZ = os.getenv('TZ') + + @classmethod + def tearDownClass(cls): + if cls.origTZ is None: + if 'TZ' in os.environ: + del os.environ['TZ'] + else: + os.environ['TZ'] = cls.origTZ + + def test_workflow_timing(self): + night = timing.what_night_is_it() + self.assertIsInstance(night, int) + self.assertTrue(str(night).startswith('20')) + self.assertEqual(len(str(night)), 8) + + start_time = timing.get_nightly_start_time() + self.assertGreaterEqual(start_time, 0) + self.assertLess(start_time, 24) + + end_time = timing.get_nightly_end_time() + self.assertGreaterEqual(end_time, 0) + self.assertLess(end_time, 24) + + os.environ['TZ'] = 'US/California' + timing.ensure_tucson_time() + self.assertEqual(os.environ['TZ'], 'US/Arizona') + del os.environ['TZ'] + timing.ensure_tucson_time() + self.assertEqual(os.environ['TZ'], 'US/Arizona') + + timestr = timing.nersc_format_datetime() + timestr = timing.nersc_end_time() + yesno = timing.during_operating_hours() + + # NOT Tested: timing.wait_for_cals because that has time-dependent behavior + # and long sleeps if it doesn't find the files it is looking for.
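The setUpClass/tearDownClass pair above saves and restores os.environ['TZ'] so that the TZ manipulation cannot leak into other test modules. A standard-library alternative (a sketch assuming only unittest.mock, not a desispec API) scopes the change to a single test:

import os
import unittest
from unittest import mock

class TestTZIsolation(unittest.TestCase):
    def test_tz_override_is_scoped(self):
        # mock.patch.dict snapshots os.environ and restores it on exit,
        # even if the assertion below raises
        with mock.patch.dict(os.environ, {'TZ': 'US/Pacific'}):
            self.assertEqual(os.environ['TZ'], 'US/Pacific')
        # here the original TZ value (or its absence) has been restored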
+ + + From 9b3d4c0b77f57e8beafcd3ab60df968398fdbea7 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Tue, 5 Nov 2024 15:49:22 -0800 Subject: [PATCH 15/27] add failing test; need to debug why --- py/desispec/test/test_fiberflat.py | 87 +++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 1 deletion(-) diff --git a/py/desispec/test/test_fiberflat.py b/py/desispec/test/test_fiberflat.py index d9e7faf94..a67ec272f 100644 --- a/py/desispec/test/test_fiberflat.py +++ b/py/desispec/test/test_fiberflat.py @@ -13,12 +13,14 @@ import numpy as np import scipy.sparse +from astropy.table import Table from desispec.maskbits import specmask from desispec.resolution import Resolution from desispec.frame import Frame from desispec.fiberflat import FiberFlat from desispec.fiberflat import compute_fiberflat, apply_fiberflat +from desispec.fiberflat import average_fiberflat, autocalib_fiberflat, gradient_correction from desiutil.log import get_logger from desispec.io import write_frame import desispec.io as io @@ -43,6 +45,18 @@ def _get_data(): return wave, flux, ivar, mask +def _get_fibermap(petal, nspec): + fibermap = Table() + fibermap['FIBER'] = petal*500 + np.arange(nspec) + alpha = 2*np.pi/10 # angle of one petal in radians + theta = np.random.uniform(petal*alpha, (petal+1)*alpha, size=nspec) + r = np.random.uniform(0,400, size=nspec) + fibermap['FIBERASSIGN_X'] = r*np.cos(theta) + fibermap['FIBERASSIGN_Y'] = r*np.sin(theta) + fibermap['FIBERSTATUS'] = 0 + + return fibermap + class TestFiberFlat(unittest.TestCase): @@ -236,6 +250,14 @@ def test_throughput_resolution(self): diff = (ff.fiberflat[4]*1.2 - ff.fiberflat[mid]) self.assertLess(np.max(np.abs(diff)), accuracy) + #- Add outliers and ensure that it doesn't significantly change + frame.flux[0][0] = 1000 + frame.flux[1][10] = 2000 + frame.flux[2][20] = 3000 + ff2 = compute_fiberflat(frame,accuracy=accuracy) + self.assertTrue(np.allclose(ff.fiberflat, ff2.fiberflat)) + + def test_apply_fiberflat(self): '''test apply_fiberflat interface and changes to flux and mask''' wave = np.arange(5000, 5050) @@ -244,6 +266,7 @@ def test_apply_fiberflat(self): flux = np.random.uniform(size=(nspec, nwave)) ivar = np.ones_like(flux) frame = Frame(wave, flux, ivar, spectrograph=0, meta=dict(CAMERA='x0')) + frame.meta['HELIOCOR'] = 1.0 #- breaks test due to interpolation over bad ff fiberflat = np.ones_like(flux) ffivar = 2*np.ones_like(flux) @@ -376,7 +399,8 @@ def setUp(self): self.ivar = np.ones(self.fiberflat.shape) self.mask = np.zeros(self.fiberflat.shape, dtype=np.uint32) self.meanspec = np.random.uniform(size=self.nwave) - self.ff = FiberFlat(self.wave, self.fiberflat, self.ivar, self.mask, self.meanspec) + self.header = dict(blat=1, foo=2) + self.ff = FiberFlat(self.wave, self.fiberflat, self.ivar, self.mask, self.meanspec, header=self.header) def test_init(self): for key in ('wave', 'fiberflat', 'ivar', 'mask', 'meanspec'): @@ -420,3 +444,64 @@ def test_slice(self): x = self.ff[1:2] x = self.ff[[1,2,3]] x = self.ff[self.ff.fibers<3] + + def test_average_fiberflat(self): + ff = average_fiberflat([self.ff, self.ff]) + self.assertTrue(np.allclose(ff.fiberflat, self.ff.fiberflat)) + self.assertTrue(np.allclose(ff.ivar, self.ff.ivar*2)) + + ff = average_fiberflat([self.ff, self.ff, self.ff, self.ff]) + self.assertTrue(np.allclose(ff.fiberflat, self.ff.fiberflat)) + self.assertTrue(np.allclose(ff.ivar, self.ff.ivar*4*2/np.pi)) # 2/pi due to median vs. 
mean penalty + + #- boundary cases of 1 or 0 inputs + ff = average_fiberflat([self.ff,]) + self.assertIs(ff, self.ff) + + with self.assertRaises(ValueError): + ff = average_fiberflat([]) + + def test_autocalib_fiberflat(self): + fiberflats = list() + expid = 1000 + for petal in range(10): + fibermap = _get_fibermap(petal, self.nspec) + for i in range(3): + ff = copy.deepcopy(self.ff) + ff.header['EXPID'] = expid + expid += 1 + ff.header['CAMERA'] = f'r{petal}' + ff.fibermap = fibermap + fiberflats.append(ff) + + ff = autocalib_fiberflat(fiberflats) + + def test_gradient_correction(self): + ref_fiberflats = dict() + tilted_fiberflats = dict() + for petal in range(10): + fibermap = _get_fibermap(petal, self.nspec) + camera = f'r{petal}' + + ff = copy.deepcopy(self.ff) + ff.fiberflat[:,:] = 1.0 + ff.header['CAMERA'] = f'r{petal}' + ff.fibermap = fibermap + ref_fiberflats[camera] = ff + + ff = copy.deepcopy(self.ff) + ff.fiberflat[:,:] = 1.0 + ff.header['CAMERA'] = f'r{petal}' + ff.fibermap = fibermap + #- add +/- 5% tilt edge-to-edge + tilt = 1 + 0.05*fibermap['FIBERASSIGN_X']/400 + # tilt = np.ones(self.nspec) + for i in range(ff.fiberflat.shape[0]): + ff.fiberflat[i] *= tilt[i] + + tilted_fiberflats[camera] = ff + + final_fiberflats = gradient_correction(tilted_fiberflats, ref_fiberflats) + + for cam in final_fiberflats: + self.assertTrue(np.allclose(final_fiberflats[cam].fiberflat, ref_fiberflats[cam].fiberflat)) From 07ef972eb91d001e6018e73480a884a972a3ba66 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Tue, 3 Dec 2024 16:02:26 -0800 Subject: [PATCH 16/27] update fiberflat tests --- py/desispec/test/test_fiberflat.py | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/py/desispec/test/test_fiberflat.py b/py/desispec/test/test_fiberflat.py index a67ec272f..b7f12189d 100644 --- a/py/desispec/test/test_fiberflat.py +++ b/py/desispec/test/test_fiberflat.py @@ -270,24 +270,24 @@ def test_apply_fiberflat(self): fiberflat = np.ones_like(flux) ffivar = 2*np.ones_like(flux) - ffmask = np.zeros_like(flux) + ffmask = np.zeros_like(flux, dtype=np.uint32) fiberflat[0] *= 0.8 fiberflat[1] *= 1.2 fiberflat[2, 0:10] = 0 #- bad fiberflat ffivar[2, 10:20] = 0 #- bad fiberflat ffmask[2, 20:30] = 1 #- bad fiberflat - ff = FiberFlat(wave, fiberflat, ffivar) + ff = FiberFlat(wave, fiberflat, ffivar, mask=ffmask) + + self.assertTrue(np.all(ff.fiberflat == fiberflat)) + self.assertTrue(np.all(ff.ivar == ffivar)) + self.assertTrue(np.all(ff.mask == ffmask)) + #- Test applying fiberflat origframe = copy.deepcopy(frame) apply_fiberflat(frame, ff) - #- was fiberflat applied? - self.assertTrue(np.all(frame.flux[0] == origframe.flux[0]/0.8)) - self.assertTrue(np.all(frame.flux[1] == origframe.flux[1]/1.2)) - self.assertTrue(np.all(frame.flux[2] == origframe.flux[2])) - - #- did mask get set? + #- did mask get set for bad fiberflat? ii = (ff.fiberflat == 0) self.assertTrue(np.all((frame.mask[ii] & specmask.BADFIBERFLAT) != 0)) ii = (ff.ivar == 0) @@ -295,11 +295,18 @@ def test_apply_fiberflat(self): ii = (ff.mask != 0) self.assertTrue(np.all((frame.mask[ii] & specmask.BADFIBERFLAT) != 0)) + #- was fiberflat applied for non-masked pixels? 
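+        #- (note: apply_fiberflat may not be a simple division where the
+        #- fiberflat is masked, e.g. interpolation over bad values, so the
+        #- exact-division checks below skip pixels flagged BADFIBERFLAT)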
+ ok = (frame.mask & specmask.BADFIBERFLAT) == 0 + self.assertTrue(np.all(frame.flux[0][ok[0]] == origframe.flux[0][ok[0]]/0.8)) + self.assertTrue(np.all(frame.flux[1][ok[1]] == origframe.flux[1][ok[1]]/1.2)) + self.assertTrue(np.all(frame.flux[2][ok[2]] == origframe.flux[2][ok[2]])) + #- Should fail if frame and ff don't have a common wavelength grid frame.wave = frame.wave + 0.1 with self.assertRaises(ValueError): apply_fiberflat(frame, ff) + def test_apply_fiberflat_ivar(self): '''test error propagation in apply_fiberflat''' wave = np.arange(5000, 5010) @@ -310,13 +317,13 @@ def test_apply_fiberflat_ivar(self): origframe = Frame(wave, flux, ivar, spectrograph=0, meta=dict(CAMERA='x0')) fiberflat = np.ones_like(flux) - ffmask = np.zeros_like(flux) + ffmask = np.zeros_like(flux, dtype=np.uint32) fiberflat[0] *= 0.5 fiberflat[1] *= 1.5 #- ff with essentially no error ffivar = 1e20 * np.ones_like(flux) - ff = FiberFlat(wave, fiberflat, ffivar) + ff = FiberFlat(wave, fiberflat, ffivar, mask=ffmask) frame = copy.deepcopy(origframe) apply_fiberflat(frame, ff) self.assertTrue(np.allclose(frame.ivar, fiberflat**2)) From 08a6ffe41f9ce49de9c556f1170b1391fbaddb2f Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Tue, 3 Dec 2024 16:13:57 -0800 Subject: [PATCH 17/27] rowbyrow robust to CAMERA=unknown --- py/desispec/qproc/rowbyrowextract.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/py/desispec/qproc/rowbyrowextract.py b/py/desispec/qproc/rowbyrowextract.py index cc8548ef4..aa8ec1d3c 100644 --- a/py/desispec/qproc/rowbyrowextract.py +++ b/py/desispec/qproc/rowbyrowextract.py @@ -9,6 +9,7 @@ import numba from scipy import special, linalg from specter.util import custom_hermitenorm, custom_erf +from desispec.qproc.qframe import QFrame from desispec import qproc, io from desiutil.log import log @@ -369,13 +370,13 @@ def extract(image, psf, blocksize=25, fibermap=None, nspec=500, log.warning("setting up a fibermap to save the FIBER identifiers") fibermap = io.fibermap.empty_fibermap(nspec) # 2% of time fibermap["FIBER"] = np.arange(nspec) - if (image.meta is not None) and ('CAMERA' in image.meta): + if (image.meta is not None) and ('CAMERA' in image.meta) and (image.meta['CAMERA'] != 'unknown'): petal = int(image.meta['CAMERA'][1]) fibermap["FIBER"] += petal*500 else: fibermap = fibermap[:nspec] - out = qproc.qframe.QFrame(wave, outspec, 1/outvar, mask=None, + out = QFrame(wave, outspec, 1/outvar, mask=None, fibers=fibermap['FIBER'], meta=image.meta, fibermap=fibermap) if return_model: From 2406a0cbde5f42b33134de88be2668cd9bdb3a99 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 13:14:37 -0800 Subject: [PATCH 18/27] restore accidentally removed .coveragerc --- .coveragerc | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 000000000..7154e2f80 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,28 @@ +[run] +source = py/desispec +omit = + py/desispec/_version.py + py/desispec/conftest* + py/desispec/cython_version* + py/desispec/setup_package* + py/desispec/*/setup_package* + py/desispec/*/*/setup_package* + py/desispec/sphinx/* + py/desispec/test/* + py/desispec/*/test/* + py/desispec/*/*/test/* + +[report] +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about packages we have installed + except ImportError + + # Don't complain if tests don't hit assertions + raise AssertionError + raise 
NotImplementedError + + # Don't complain about script hooks + ### def main\(.*\): From 2f81a1033705fb70b78298b49010007236715eb6 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 14:02:07 -0800 Subject: [PATCH 19/27] pytest only py/desispec/test directory --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 646f70e02..64801e725 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -46,7 +46,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test - run: DESIMODEL=$(pwd) pytest + run: DESIMODEL=$(pwd) pytest py/desispec/test coverage: name: Test coverage From 4eaa95f6a5b83bb5b66c311064d957a3c785e87e Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 14:19:57 -0800 Subject: [PATCH 20/27] remove deprecated docs too; fix github coverage test --- .github/workflows/python-package.yml | 2 +- {doc => deprecated/doc}/pipeline.rst | 0 {doc => deprecated/doc}/qa.rst | 0 .../py}/desispec/scripts/night.py | 0 .../py}/desispec/scripts/pipe.py | 0 .../py}/desispec/scripts/quicklook.py | 0 doc/api.rst | 198 ------------------ doc/index.rst | 2 - 8 files changed, 1 insertion(+), 201 deletions(-) rename {doc => deprecated/doc}/pipeline.rst (100%) rename {doc => deprecated/doc}/qa.rst (100%) rename {py => deprecated/py}/desispec/scripts/night.py (100%) rename {py => deprecated/py}/desispec/scripts/pipe.py (100%) rename {py => deprecated/py}/desispec/scripts/quicklook.py (100%) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 64801e725..06a11271d 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -85,7 +85,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test with coverage - run: DESIMODEL=$(pwd) pytest --cov + run: DESIMODEL=$(pwd) pytest --cov py/desispec/test - name: Coveralls env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/doc/pipeline.rst b/deprecated/doc/pipeline.rst similarity index 100% rename from doc/pipeline.rst rename to deprecated/doc/pipeline.rst diff --git a/doc/qa.rst b/deprecated/doc/qa.rst similarity index 100% rename from doc/qa.rst rename to deprecated/doc/qa.rst diff --git a/py/desispec/scripts/night.py b/deprecated/py/desispec/scripts/night.py similarity index 100% rename from py/desispec/scripts/night.py rename to deprecated/py/desispec/scripts/night.py diff --git a/py/desispec/scripts/pipe.py b/deprecated/py/desispec/scripts/pipe.py similarity index 100% rename from py/desispec/scripts/pipe.py rename to deprecated/py/desispec/scripts/pipe.py diff --git a/py/desispec/scripts/quicklook.py b/deprecated/py/desispec/scripts/quicklook.py similarity index 100% rename from py/desispec/scripts/quicklook.py rename to deprecated/py/desispec/scripts/quicklook.py diff --git a/doc/api.rst b/doc/api.rst index 559f34615..41cec2d4d 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -152,9 +152,6 @@ desispec API .. automodule:: desispec.io.photo :members: -.. automodule:: desispec.io.qa - :members: - .. 
automodule:: desispec.io.raw :members: @@ -212,84 +209,6 @@ desispec API .. automodule:: desispec.parallel :members: -.. automodule:: desispec.pipeline - :members: - -.. automodule:: desispec.pipeline.control - :members: - -.. automodule:: desispec.pipeline.db - :members: - -.. automodule:: desispec.pipeline.defs - :members: - -.. automodule:: desispec.pipeline.plan - :members: - -.. automodule:: desispec.pipeline.prod - :members: - -.. automodule:: desispec.pipeline.run - :members: - -.. automodule:: desispec.pipeline.scriptgen - :members: - -.. automodule:: desispec.pipeline.tasks - :members: - -.. automodule:: desispec.pipeline.tasks.base - :members: - -.. automodule:: desispec.pipeline.tasks.cframe - :members: - -.. automodule:: desispec.pipeline.tasks.extract - :members: - -.. automodule:: desispec.pipeline.tasks.fiberflat - :members: - -.. automodule:: desispec.pipeline.tasks.fiberflatnight - :members: - -.. automodule:: desispec.pipeline.tasks.fibermap - :members: - -.. automodule:: desispec.pipeline.tasks.fluxcalib - :members: - -.. automodule:: desispec.pipeline.tasks.preproc - :members: - -.. automodule:: desispec.pipeline.tasks.psf - :members: - -.. automodule:: desispec.pipeline.tasks.psfnight - :members: - -.. automodule:: desispec.pipeline.tasks.qadata - :members: - -.. automodule:: desispec.pipeline.tasks.rawdata - :members: - -.. automodule:: desispec.pipeline.tasks.redshift - :members: - -.. automodule:: desispec.pipeline.tasks.sky - :members: - -.. automodule:: desispec.pipeline.tasks.spectra - :members: - -.. automodule:: desispec.pipeline.tasks.starfit - :members: - -.. automodule:: desispec.pipeline.tasks.traceshift - :members: - .. automodule:: desispec.pixflat :members: @@ -299,45 +218,6 @@ desispec API .. automodule:: desispec.preproc :members: -.. automodule:: desispec.qa - :members: - -.. automodule:: desispec.qa.html - :members: - -.. automodule:: desispec.qa.qa_brick - :members: - -.. automodule:: desispec.qa.qa_exposure - :members: - -.. automodule:: desispec.qa.qa_frame - :members: - -.. automodule:: desispec.qa.qa_multiexp - :members: - -.. automodule:: desispec.qa.qa_night - :members: - -.. automodule:: desispec.qa.qa_plots - :members: - -.. automodule:: desispec.qa.qa_plots_ql - :members: - -.. automodule:: desispec.qa.qa_prod - :members: - -.. automodule:: desispec.qa.qa_quicklook - :members: - -.. automodule:: desispec.qa.qalib - :members: - -.. automodule:: desispec.qa.utils - :members: - .. automodule:: desispec.qproc :members: @@ -365,60 +245,6 @@ desispec API .. automodule:: desispec.qproc.rowbyrowextract :members: -.. automodule:: desispec.quicklook - :members: - -.. automodule:: desispec.quicklook.arcprocess - :members: - -.. automodule:: desispec.quicklook.merger - :members: - -.. automodule:: desispec.quicklook.palib - :members: - -.. automodule:: desispec.quicklook.pas - :members: - -.. automodule:: desispec.quicklook.procalgs - :members: - -.. automodule:: desispec.quicklook.qas - :members: - -.. automodule:: desispec.quicklook.ql_plotlib - :members: - -.. automodule:: desispec.quicklook.qlboxcar - :members: - -.. automodule:: desispec.quicklook.qlconfig - :members: - -.. automodule:: desispec.quicklook.qlexceptions - :members: - -.. automodule:: desispec.quicklook.qlheartbeat - :members: - -.. automodule:: desispec.quicklook.qllogger - :members: - -.. automodule:: desispec.quicklook.qlpsf - :members: - -.. automodule:: desispec.quicklook.qlresolution - :members: - -.. automodule:: desispec.quicklook.quickfiberflat - :members: - -.. 
automodule:: desispec.quicklook.quicklook - :members: - -.. automodule:: desispec.quicklook.quicksky - :members: - .. automodule:: desispec.resolution :members: @@ -524,15 +350,9 @@ desispec API .. automodule:: desispec.scripts.mergebundles :members: -.. automodule:: desispec.scripts.night - :members: - .. automodule:: desispec.scripts.nightly_bias :members: -.. automodule:: desispec.scripts.pipe - :members: - .. automodule:: desispec.scripts.pipe_exec :members: @@ -560,18 +380,6 @@ desispec API .. automodule:: desispec.scripts.procexp :members: -.. automodule:: desispec.scripts.qa_exposure - :members: - -.. automodule:: desispec.scripts.qa_frame - :members: - -.. automodule:: desispec.scripts.qa_night - :members: - -.. automodule:: desispec.scripts.qa_prod - :members: - .. automodule:: desispec.scripts.qproc :members: @@ -581,9 +389,6 @@ desispec API .. automodule:: desispec.scripts.qsoqn :members: -.. automodule:: desispec.scripts.quicklook - :members: - .. automodule:: desispec.scripts.reformat_exptables :members: @@ -602,9 +407,6 @@ desispec API .. automodule:: desispec.scripts.sky :members: -.. automodule:: desispec.scripts.skysubresid - :members: - .. automodule:: desispec.scripts.specex :members: diff --git a/doc/index.rst b/doc/index.rst index 2ef0ebf57..572abdace 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -10,10 +10,8 @@ Contents overview.rst install.rst - pipeline.rst dev.rst coadd.rst - qa.rst cte-correction.rst changes.rst api.rst From c003ba94bd7adaac1611301d7bb9e3bff8c08f53 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 14:31:01 -0800 Subject: [PATCH 21/27] remove another deprecated pipeline script wrapper --- {py => deprecated/py}/desispec/scripts/pipe_exec.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {py => deprecated/py}/desispec/scripts/pipe_exec.py (100%) diff --git a/py/desispec/scripts/pipe_exec.py b/deprecated/py/desispec/scripts/pipe_exec.py similarity index 100% rename from py/desispec/scripts/pipe_exec.py rename to deprecated/py/desispec/scripts/pipe_exec.py From 67e60316a1f6f6759e804ddb83a1244f439510af Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 14:37:03 -0800 Subject: [PATCH 22/27] whack that mole! --- .github/workflows/python-package.yml | 2 +- doc/api.rst | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 06a11271d..b8bcbd98b 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -85,7 +85,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test with coverage - run: DESIMODEL=$(pwd) pytest --cov py/desispec/test + run: DESIMODEL=$(pwd) pytest --cov=desispec py/desispec/test - name: Coveralls env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/doc/api.rst b/doc/api.rst index 41cec2d4d..3afaae0d5 100644 --- a/doc/api.rst +++ b/doc/api.rst @@ -353,9 +353,6 @@ desispec API .. automodule:: desispec.scripts.nightly_bias :members: -.. automodule:: desispec.scripts.pipe_exec - :members: - .. 
automodule:: desispec.scripts.preproc :members: From 2d6fd7cb5eeac83c5687749c9c34d86e1116b78d Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 15:38:12 -0800 Subject: [PATCH 23/27] debugging github tests --- py/desispec/test/test_fiberflat.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/py/desispec/test/test_fiberflat.py b/py/desispec/test/test_fiberflat.py index b7f12189d..a89021e5e 100644 --- a/py/desispec/test/test_fiberflat.py +++ b/py/desispec/test/test_fiberflat.py @@ -502,13 +502,22 @@ def test_gradient_correction(self): ff.fibermap = fibermap #- add +/- 5% tilt edge-to-edge tilt = 1 + 0.05*fibermap['FIBERASSIGN_X']/400 - # tilt = np.ones(self.nspec) + print(f'DEBUG: {petal=} {np.min(tilt)=} {np.max(tilt)=}') + print(f'DEBUG: {petal=} {np.min(ff.fiberflat)=} {np.max(ff.fiberflat)=}') for i in range(ff.fiberflat.shape[0]): ff.fiberflat[i] *= tilt[i] tilted_fiberflats[camera] = ff - final_fiberflats = gradient_correction(tilted_fiberflats, ref_fiberflats) + try: + final_fiberflats = gradient_correction(tilted_fiberflats, ref_fiberflats) + except Exception as err: + for petal in range(10): + camera = f'r{petal}' + ffratio = tilted_fiberflats[camera].fiberflat / ref_fiberflats[camera].fiberflat + print(f'DEBUG: {camera=} {ffratio=}') + + raise err for cam in final_fiberflats: self.assertTrue(np.allclose(final_fiberflats[cam].fiberflat, ref_fiberflats[cam].fiberflat)) From bb88a417cc479eb9314f746567fa57406c0f9eea Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 16:08:58 -0800 Subject: [PATCH 24/27] deprecate desi_quicklook and wrap_desi_night.sh --- {bin => deprecated/bin}/desi_quicklook | 0 {bin => deprecated/bin}/wrap_desi_night.sh | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename {bin => deprecated/bin}/desi_quicklook (100%) rename {bin => deprecated/bin}/wrap_desi_night.sh (100%) diff --git a/bin/desi_quicklook b/deprecated/bin/desi_quicklook similarity index 100% rename from bin/desi_quicklook rename to deprecated/bin/desi_quicklook diff --git a/bin/wrap_desi_night.sh b/deprecated/bin/wrap_desi_night.sh similarity index 100% rename from bin/wrap_desi_night.sh rename to deprecated/bin/wrap_desi_night.sh From 988fe34794c1a1f6a221dc9e7c5f9f2838dca7d0 Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 16:32:34 -0800 Subject: [PATCH 25/27] remove test_fiberflat debugging --- {bin => deprecated/bin}/desi_daily_proc_manager | 0 {bin => deprecated/bin}/desi_night | 0 {bin => deprecated/bin}/desi_pipe | 0 {bin => deprecated/bin}/desi_pipe_exec | 0 {bin => deprecated/bin}/desi_pipe_exec_mpi | 0 {bin => deprecated/bin}/desi_pipe_status | 0 .../py}/desispec/scripts/daily_processing.py | 0 py/desispec/test/test_fiberflat.py | 12 +----------- 8 files changed, 1 insertion(+), 11 deletions(-) rename {bin => deprecated/bin}/desi_daily_proc_manager (100%) rename {bin => deprecated/bin}/desi_night (100%) rename {bin => deprecated/bin}/desi_pipe (100%) rename {bin => deprecated/bin}/desi_pipe_exec (100%) rename {bin => deprecated/bin}/desi_pipe_exec_mpi (100%) rename {bin => deprecated/bin}/desi_pipe_status (100%) rename {py => deprecated/py}/desispec/scripts/daily_processing.py (100%) diff --git a/bin/desi_daily_proc_manager b/deprecated/bin/desi_daily_proc_manager similarity index 100% rename from bin/desi_daily_proc_manager rename to deprecated/bin/desi_daily_proc_manager diff --git a/bin/desi_night b/deprecated/bin/desi_night similarity index 100% rename from bin/desi_night rename to 
deprecated/bin/desi_night diff --git a/bin/desi_pipe b/deprecated/bin/desi_pipe similarity index 100% rename from bin/desi_pipe rename to deprecated/bin/desi_pipe diff --git a/bin/desi_pipe_exec b/deprecated/bin/desi_pipe_exec similarity index 100% rename from bin/desi_pipe_exec rename to deprecated/bin/desi_pipe_exec diff --git a/bin/desi_pipe_exec_mpi b/deprecated/bin/desi_pipe_exec_mpi similarity index 100% rename from bin/desi_pipe_exec_mpi rename to deprecated/bin/desi_pipe_exec_mpi diff --git a/bin/desi_pipe_status b/deprecated/bin/desi_pipe_status similarity index 100% rename from bin/desi_pipe_status rename to deprecated/bin/desi_pipe_status diff --git a/py/desispec/scripts/daily_processing.py b/deprecated/py/desispec/scripts/daily_processing.py similarity index 100% rename from py/desispec/scripts/daily_processing.py rename to deprecated/py/desispec/scripts/daily_processing.py diff --git a/py/desispec/test/test_fiberflat.py b/py/desispec/test/test_fiberflat.py index a89021e5e..3ce98bd05 100644 --- a/py/desispec/test/test_fiberflat.py +++ b/py/desispec/test/test_fiberflat.py @@ -502,22 +502,12 @@ def test_gradient_correction(self): ff.fibermap = fibermap #- add +/- 5% tilt edge-to-edge tilt = 1 + 0.05*fibermap['FIBERASSIGN_X']/400 - print(f'DEBUG: {petal=} {np.min(tilt)=} {np.max(tilt)=}') - print(f'DEBUG: {petal=} {np.min(ff.fiberflat)=} {np.max(ff.fiberflat)=}') for i in range(ff.fiberflat.shape[0]): ff.fiberflat[i] *= tilt[i] tilted_fiberflats[camera] = ff - try: - final_fiberflats = gradient_correction(tilted_fiberflats, ref_fiberflats) - except Exception as err: - for petal in range(10): - camera = f'r{petal}' - ffratio = tilted_fiberflats[camera].fiberflat / ref_fiberflats[camera].fiberflat - print(f'DEBUG: {camera=} {ffratio=}') - - raise err + final_fiberflats = gradient_correction(tilted_fiberflats, ref_fiberflats) for cam in final_fiberflats: self.assertTrue(np.allclose(final_fiberflats[cam].fiberflat, ref_fiberflats[cam].fiberflat)) From 3f8510258404963e1d7ec6d155b803c73790c06f Mon Sep 17 00:00:00 2001 From: Benjamin Alan Weaver Date: Wed, 4 Dec 2024 17:49:53 -0700 Subject: [PATCH 26/27] move pytest and coverage configuration into setup.cfg --- .coveragerc | 28 ------------------------- .github/workflows/python-package.yml | 4 ++-- setup.cfg | 31 ++++++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 30 deletions(-) delete mode 100644 .coveragerc diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 7154e2f80..000000000 --- a/.coveragerc +++ /dev/null @@ -1,28 +0,0 @@ -[run] -source = py/desispec -omit = - py/desispec/_version.py - py/desispec/conftest* - py/desispec/cython_version* - py/desispec/setup_package* - py/desispec/*/setup_package* - py/desispec/*/*/setup_package* - py/desispec/sphinx/* - py/desispec/test/* - py/desispec/*/test/* - py/desispec/*/*/test/* - -[report] -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about packages we have installed - except ImportError - - # Don't complain if tests don't hit assertions - raise AssertionError - raise NotImplementedError - - # Don't complain about script hooks - ### def main\(.*\): diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b8bcbd98b..646f70e02 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -46,7 +46,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export 
https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test - run: DESIMODEL=$(pwd) pytest py/desispec/test + run: DESIMODEL=$(pwd) pytest coverage: name: Test coverage @@ -85,7 +85,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test with coverage - run: DESIMODEL=$(pwd) pytest --cov=desispec py/desispec/test + run: DESIMODEL=$(pwd) pytest --cov - name: Coveralls env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/setup.cfg b/setup.cfg index 1cee296c9..6aadb66f5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,34 @@ +[tool:pytest] +testpaths = py + +[coverage:run] +relative_files = True +source = + py/desispec +omit = + py/desispec/_version.py + py/desispec/conftest* + py/desispec/test/* + */desispec/_version.py + */desispec/conftest* + */desispec/test/* + +[coverage:report] +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + # Don't complain about packages we have installed + except ImportError + # Don't complain if tests don't hit assertions + raise AssertionError + raise NotImplementedError + # Don't complain about script hooks + def main\(.*\): + # Ignore branches that don't pertain to this version of Python + pragma: py{ignore_python_version} + # Don't complain about IPython completion helper + def _ipython_key_completions_ + [pycodestyle] # See https://pycodestyle.readthedocs.io/en/latest/intro.html#configuration # for details of these configuration options. From 443d51798082c8c52c8d43430be9eb91d2947bbe Mon Sep 17 00:00:00 2001 From: Stephen Bailey Date: Wed, 4 Dec 2024 17:09:46 -0800 Subject: [PATCH 27/27] remove deprecated/ dir; restore pytest auto-discovery --- .github/workflows/python-package.yml | 4 +- deprecated/bin/desi_daily_proc_manager | 179 -- deprecated/bin/desi_night | 16 - deprecated/bin/desi_pipe | 14 - deprecated/bin/desi_pipe_exec | 15 - deprecated/bin/desi_pipe_exec_mpi | 42 - deprecated/bin/desi_pipe_status | 16 - deprecated/bin/desi_qa_exposure | 17 - deprecated/bin/desi_qa_frame | 16 - deprecated/bin/desi_qa_night | 17 - deprecated/bin/desi_qa_prod | 16 - deprecated/bin/desi_qa_skyresid | 16 - deprecated/bin/desi_quicklook | 8 - deprecated/bin/wrap_desi_night.sh | 26 - deprecated/doc/pipeline.rst | 190 -- deprecated/doc/qa.rst | 327 --- deprecated/py/desispec/io/qa.py | 294 --- deprecated/py/desispec/pipeline/__init__.py | 26 - deprecated/py/desispec/pipeline/control.py | 1331 ---------- deprecated/py/desispec/pipeline/db.py | 1319 ---------- deprecated/py/desispec/pipeline/defs.py | 55 - deprecated/py/desispec/pipeline/plan.py | 492 ---- deprecated/py/desispec/pipeline/prod.py | 320 --- deprecated/py/desispec/pipeline/run.py | 610 ----- deprecated/py/desispec/pipeline/scriptgen.py | 426 ---- .../py/desispec/pipeline/tasks/__init__.py | 66 - deprecated/py/desispec/pipeline/tasks/base.py | 623 ----- .../py/desispec/pipeline/tasks/cframe.py | 147 -- .../py/desispec/pipeline/tasks/extract.py | 162 -- .../py/desispec/pipeline/tasks/fiberflat.py | 133 - .../desispec/pipeline/tasks/fiberflatnight.py | 152 -- .../py/desispec/pipeline/tasks/fibermap.py | 93 - .../py/desispec/pipeline/tasks/fluxcalib.py | 139 - .../py/desispec/pipeline/tasks/preproc.py | 162 -- deprecated/py/desispec/pipeline/tasks/psf.py | 169 -- .../py/desispec/pipeline/tasks/psfnight.py | 161 -- .../py/desispec/pipeline/tasks/qadata.py | 125 - 
.../py/desispec/pipeline/tasks/rawdata.py | 93 - .../py/desispec/pipeline/tasks/redshift.py | 207 -- deprecated/py/desispec/pipeline/tasks/sky.py | 136 - .../py/desispec/pipeline/tasks/spectra.py | 209 -- .../py/desispec/pipeline/tasks/starfit.py | 205 -- .../py/desispec/pipeline/tasks/traceshift.py | 144 -- deprecated/py/desispec/qa/__init__.py | 14 - deprecated/py/desispec/qa/html.py | 333 --- deprecated/py/desispec/qa/qa_brick.py | 115 - deprecated/py/desispec/qa/qa_exposure.py | 321 --- deprecated/py/desispec/qa/qa_frame.py | 417 --- deprecated/py/desispec/qa/qa_multiexp.py | 270 -- deprecated/py/desispec/qa/qa_night.py | 60 - deprecated/py/desispec/qa/qa_plots.py | 1584 ------------ deprecated/py/desispec/qa/qa_plots_ql.py | 756 ------ deprecated/py/desispec/qa/qa_prod.py | 121 - deprecated/py/desispec/qa/qa_quicklook.py | 2246 ----------------- deprecated/py/desispec/qa/qalib.py | 943 ------- deprecated/py/desispec/qa/utils.py | 78 - deprecated/py/desispec/quicklook/__init__.py | 17 - .../py/desispec/quicklook/arcprocess.py | 160 -- deprecated/py/desispec/quicklook/merger.py | 367 --- deprecated/py/desispec/quicklook/palib.py | 175 -- deprecated/py/desispec/quicklook/pas.py | 37 - deprecated/py/desispec/quicklook/procalgs.py | 1297 ---------- deprecated/py/desispec/quicklook/qas.py | 307 --- .../py/desispec/quicklook/ql_plotlib.py | 194 -- deprecated/py/desispec/quicklook/qlboxcar.py | 138 - deprecated/py/desispec/quicklook/qlconfig.py | 519 ---- .../py/desispec/quicklook/qlexceptions.py | 12 - .../py/desispec/quicklook/qlheartbeat.py | 67 - deprecated/py/desispec/quicklook/qllogger.py | 27 - deprecated/py/desispec/quicklook/qlpsf.py | 140 - .../py/desispec/quicklook/qlresolution.py | 63 - .../py/desispec/quicklook/quickfiberflat.py | 65 - deprecated/py/desispec/quicklook/quicklook.py | 378 --- deprecated/py/desispec/quicklook/quicksky.py | 211 -- .../py/desispec/scripts/daily_processing.py | 523 ---- deprecated/py/desispec/scripts/night.py | 563 ----- deprecated/py/desispec/scripts/pipe.py | 963 ------- deprecated/py/desispec/scripts/pipe_exec.py | 178 -- deprecated/py/desispec/scripts/qa_exposure.py | 90 - deprecated/py/desispec/scripts/qa_frame.py | 52 - deprecated/py/desispec/scripts/qa_night.py | 116 - deprecated/py/desispec/scripts/qa_prod.py | 142 -- deprecated/py/desispec/scripts/quicklook.py | 146 -- deprecated/py/desispec/scripts/skysubresid.py | 167 -- .../py/desispec/test/integration_test.py | 313 --- .../py/desispec/test/old_integration_test.py | 407 --- deprecated/py/desispec/test/test_qa.py | 318 --- deprecated/py/desispec/test/test_ql.py | 218 -- deprecated/py/desispec/test/test_ql_pa.py | 150 -- deprecated/py/desispec/test/test_ql_qa.py | 537 ---- deprecated/py/desispec/test/test_qlextract.py | 86 - 91 files changed, 2 insertions(+), 25317 deletions(-) delete mode 100755 deprecated/bin/desi_daily_proc_manager delete mode 100755 deprecated/bin/desi_night delete mode 100755 deprecated/bin/desi_pipe delete mode 100755 deprecated/bin/desi_pipe_exec delete mode 100755 deprecated/bin/desi_pipe_exec_mpi delete mode 100755 deprecated/bin/desi_pipe_status delete mode 100755 deprecated/bin/desi_qa_exposure delete mode 100755 deprecated/bin/desi_qa_frame delete mode 100755 deprecated/bin/desi_qa_night delete mode 100755 deprecated/bin/desi_qa_prod delete mode 100755 deprecated/bin/desi_qa_skyresid delete mode 100755 deprecated/bin/desi_quicklook delete mode 100755 deprecated/bin/wrap_desi_night.sh delete mode 100644 deprecated/doc/pipeline.rst delete mode 100644 
deprecated/doc/qa.rst delete mode 100644 deprecated/py/desispec/io/qa.py delete mode 100644 deprecated/py/desispec/pipeline/__init__.py delete mode 100644 deprecated/py/desispec/pipeline/control.py delete mode 100644 deprecated/py/desispec/pipeline/db.py delete mode 100644 deprecated/py/desispec/pipeline/defs.py delete mode 100644 deprecated/py/desispec/pipeline/plan.py delete mode 100644 deprecated/py/desispec/pipeline/prod.py delete mode 100644 deprecated/py/desispec/pipeline/run.py delete mode 100644 deprecated/py/desispec/pipeline/scriptgen.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/__init__.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/base.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/cframe.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/extract.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/fiberflat.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/fiberflatnight.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/fibermap.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/fluxcalib.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/preproc.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/psf.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/psfnight.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/qadata.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/rawdata.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/redshift.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/sky.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/spectra.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/starfit.py delete mode 100644 deprecated/py/desispec/pipeline/tasks/traceshift.py delete mode 100644 deprecated/py/desispec/qa/__init__.py delete mode 100644 deprecated/py/desispec/qa/html.py delete mode 100644 deprecated/py/desispec/qa/qa_brick.py delete mode 100644 deprecated/py/desispec/qa/qa_exposure.py delete mode 100644 deprecated/py/desispec/qa/qa_frame.py delete mode 100644 deprecated/py/desispec/qa/qa_multiexp.py delete mode 100644 deprecated/py/desispec/qa/qa_night.py delete mode 100644 deprecated/py/desispec/qa/qa_plots.py delete mode 100644 deprecated/py/desispec/qa/qa_plots_ql.py delete mode 100644 deprecated/py/desispec/qa/qa_prod.py delete mode 100644 deprecated/py/desispec/qa/qa_quicklook.py delete mode 100644 deprecated/py/desispec/qa/qalib.py delete mode 100644 deprecated/py/desispec/qa/utils.py delete mode 100644 deprecated/py/desispec/quicklook/__init__.py delete mode 100644 deprecated/py/desispec/quicklook/arcprocess.py delete mode 100644 deprecated/py/desispec/quicklook/merger.py delete mode 100644 deprecated/py/desispec/quicklook/palib.py delete mode 100644 deprecated/py/desispec/quicklook/pas.py delete mode 100644 deprecated/py/desispec/quicklook/procalgs.py delete mode 100644 deprecated/py/desispec/quicklook/qas.py delete mode 100644 deprecated/py/desispec/quicklook/ql_plotlib.py delete mode 100644 deprecated/py/desispec/quicklook/qlboxcar.py delete mode 100644 deprecated/py/desispec/quicklook/qlconfig.py delete mode 100644 deprecated/py/desispec/quicklook/qlexceptions.py delete mode 100644 deprecated/py/desispec/quicklook/qlheartbeat.py delete mode 100644 deprecated/py/desispec/quicklook/qllogger.py delete mode 100644 deprecated/py/desispec/quicklook/qlpsf.py delete mode 100644 deprecated/py/desispec/quicklook/qlresolution.py delete mode 100644 deprecated/py/desispec/quicklook/quickfiberflat.py 
delete mode 100644 deprecated/py/desispec/quicklook/quicklook.py delete mode 100644 deprecated/py/desispec/quicklook/quicksky.py delete mode 100644 deprecated/py/desispec/scripts/daily_processing.py delete mode 100644 deprecated/py/desispec/scripts/night.py delete mode 100644 deprecated/py/desispec/scripts/pipe.py delete mode 100644 deprecated/py/desispec/scripts/pipe_exec.py delete mode 100644 deprecated/py/desispec/scripts/qa_exposure.py delete mode 100644 deprecated/py/desispec/scripts/qa_frame.py delete mode 100644 deprecated/py/desispec/scripts/qa_night.py delete mode 100644 deprecated/py/desispec/scripts/qa_prod.py delete mode 100644 deprecated/py/desispec/scripts/quicklook.py delete mode 100644 deprecated/py/desispec/scripts/skysubresid.py delete mode 100644 deprecated/py/desispec/test/integration_test.py delete mode 100644 deprecated/py/desispec/test/old_integration_test.py delete mode 100644 deprecated/py/desispec/test/test_qa.py delete mode 100644 deprecated/py/desispec/test/test_ql.py delete mode 100644 deprecated/py/desispec/test/test_ql_pa.py delete mode 100644 deprecated/py/desispec/test/test_ql_qa.py delete mode 100644 deprecated/py/desispec/test/test_qlextract.py diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index b8bcbd98b..646f70e02 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -46,7 +46,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test - run: DESIMODEL=$(pwd) pytest py/desispec/test + run: DESIMODEL=$(pwd) pytest coverage: name: Test coverage @@ -85,7 +85,7 @@ jobs: python -m pip install --no-deps --force-reinstall --ignore-installed 'fitsio${{ matrix.fitsio-version }}' svn export https://desi.lbl.gov/svn/code/desimodel/${DESIMODEL_DATA}/data - name: Run the test with coverage - run: DESIMODEL=$(pwd) pytest --cov=desispec py/desispec/test + run: DESIMODEL=$(pwd) pytest --cov - name: Coveralls env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/deprecated/bin/desi_daily_proc_manager b/deprecated/bin/desi_daily_proc_manager deleted file mode 100755 index 386cf0aea..000000000 --- a/deprecated/bin/desi_daily_proc_manager +++ /dev/null @@ -1,179 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -import argparse -import socket -import sys -import os - -from desispec.scripts.daily_processing import daily_processing_manager -from desispec.io.util import parse_cameras -from desispec.workflow.proc_dashboard_funcs import get_skipped_ids - -from desispec.workflow.timing import during_operating_hours -from desispec.workflow.utils import check_running - -def parse_args():#options=None): - """ - Creates an arguments parser for the desi daily processing manager - """ - parser = argparse.ArgumentParser(description="Perform daily processing of spectral" + - "data using the DESI pipeline.") - - parser.add_argument("--cameras", type=str, required=False, default=None, - help="Explicitly define the cameras for which you want" + - " to reduce the data. Should be a comma separated list." + - " Only numbers assumes you want to reduce r, b, and z " + - "for that camera. Otherwise specify separately [brz][0-9].") - parser.add_argument("--bad-cameras", type=str, required=False, default=None, - help="Explicitly define the cameras that you don't want" + - " to reduce the data. Should be a comma separated list." 
+ - " Only numbers assumes you want to reduce r, b, and z " + - "for that camera. Otherwise specify separately [brz][0-9].") - parser.add_argument("--badamps", type=str, required=False, default=None, - help="Define amplifiers that you know to be bad and should not" + - " be processed. Should be a list separated by comma or semicolon." + - " Saved list will converted to semicolons. Each entry should be of " + - "the form {camera}{spectrograph}{amp}, i.e. [brz][0-9][A-D].") - parser.add_argument("--exp-obstypes", type=str, default=None, required=False, - help="The basic data obstypes to save in the exposure table. " + - "E.g. science, dark, twilight, flat, arc, zero.") - parser.add_argument("--proc-obstypes", type=str, default=None, required=False, - help="The basic data obstypes to submit for processing. " + - "E.g. science, dark, twilight, flat, arc, zero.") - parser.add_argument("--z-submit-types", type=str, default='cumulative', required=False, - help="The group types of redshifts that should be submitted with each exposure. If not "+ - "specified, default for daily processing is 'cumulative'. If "+ - "'false' or 'None' then no redshifts are submitted") - parser.add_argument("--dry-run-level", type=int, default=0, required=False, - help="If nonzero, this is a simulated run. If dry_run=1 the scripts will be written but not submitted. "+ - "If dry_run=2, the scripts will not be written or submitted. Logging will remain the same "+ - "for testing as though scripts are being submitted. Default is 0 (false).") - # File and dir defs - parser.add_argument("-s", "--specprod", type=str, required=False, default=None, - help="Subdirectory under DESI_SPECTRO_REDUX to write the output files. "+\ - "Overwrites the environment variable SPECPROD") - parser.add_argument("-q", "--queue", type=str, required=False, default='realtime', - help="The queue to submit jobs to. Default is realtime.") - parser.add_argument("--exp-table-path", type=str, required=False, default=None, - help="Directory name where the output exposure table should be saved.") - parser.add_argument("--proc-table-path", type=str, required=False, default=None, - help="Directory name where the output processing table should be saved.") - parser.add_argument("--raw-data-path", type=str, required=False, default=None, - help="Directory name where the input raw data can be found.") - parser.add_argument("--table-file-type", type=str, required=False, default='csv', - help="File format and extension for the exp and proc tables.") - parser.add_argument("--data-cadence-time", type=int, required=False, default=300, - help="Wait time between loops in looking for new data.") - parser.add_argument("--queue-cadence-time", type=int, required=False, default=1800, - help="Wait time between loops in checking queue statuses and resubmitting failures.") - parser.add_argument("--exp-cadence-time", type=int, required=False, default=2, - help="Wait time between processing science exposures.") - parser.add_argument("--override-night", type=str,default=None, - help="Specify the night to run on. 
Overrides the current day.") - parser.add_argument("--ignore-expid-list", type=str,default=None, - help="Specify the expid's to ignore in a comma separated list given as a string.") - parser.add_argument("--ignore-expid-file", type=str,default=None, - help="Specify the expid's to ignore in a text file with one expid per line.") - # parser.add_argument("-r", "--reduxdir", type=str, required=False, - # help="Main reduction dir where specprod dir will reside.") - - # Code Flags - parser.add_argument("--ignore-instances", action="store_true", - help="Allow script to run even if another instance is " + - "running. Use with care.") - parser.add_argument("--ignore-cori-node", action="store_true", - help="Allow script to run on nodes other than cori21") - parser.add_argument("--dry-run", action="store_true", - help="Perform a dry run where no jobs are actually created or submitted. Overwritten if "+ - "dry-run-level is defined as nonzero.") - parser.add_argument("--no-redshifts", action="store_true", - help="Whether to submit redshifts or not. If set, redshifts are not submitted.") - parser.add_argument("--continue-looping-debug",action="store_true",help= "FOR DEBUG purposes only."+ - "Will continue looping in search of new data until the process is terminated externally.") - parser.add_argument("--dont-check-job-outputs", action="store_true", - help="If all files for a pending job exist and this is False, then the script will not be "+ - "submitted. If some files exist and this is True, only the"+ - "subset of the cameras without the final data products will be generated and submitted.") - parser.add_argument("--dont-resubmit-partial-jobs", action="store_true", - help="Must be False if --dont-check-job-outputs is False. If False, jobs with some prior data "+ - "are pruned using PROCCAMWORD to only process the remaining cameras not found to exist.") - parser.add_argument("--use-specter", action="store_true", - help="Use specter. Default is to use gpu_specter") - parser.add_argument("--use-tilenight", action=argparse.BooleanOptionalAction, - help="Use desi_proc_tilenight (or not) for prestdstar, stdstar, and poststdstar steps. "+ - "Default False for NERSC Cori, True otherwise") - # parser.add_argument("--force-specprod", action="store_true", - # help="Force the files to be written to custom SPECPROD " + - # "even if user is desi.") - # parser.add_argument("--scattered-light", action="store_true", - # help="Pass scattered light command to desi_proc. Fits and removes scattered light.") - # parser.add_argument("--most-recent-calib", action="store_true", - # help="Look backward in time for the most recent night with good calibration files." + \ - # " If not set the defaults in DESI_SPECTRO_CALIB are used.") - - # Read in command line and return - # if options is None: - # args = parser.parse_args() - # else: - # args = parser.parse_args(options) - args = parser.parse_args() - - if args.use_tilenight is None: - if 'NERSC_HOST' in os.environ and os.environ['NERSC_HOST'] == 'cori': - args.use_tilenight = False - else: - args.use_tilenight = True - elif args.use_tilenight and os.environ['NERSC_HOST'] == 'cori': - print('Tilenight is not supported on Cori. 
Exiting.') - sys.exit(1) - return args - - -if __name__ == '__main__': - args = parse_args() - if not during_operating_hours(dry_run=args.dry_run) and args.override_night is None: - from desispec.workflow.timing import get_nightly_start_time, get_nightly_end_time - start = get_nightly_start_time() - end = get_nightly_end_time() - print(f"Not during operating hours of {start} to {end}. Exiting") - sys.exit(1) - - if not args.ignore_cori_node and socket.gethostname() != 'cori21': - print('This should only run on cori21') - sys.exit(1) - - if not args.ignore_instances: - running = check_running(proc_name='desi_daily_proc_manager') - if not running: - print('OK to run') - else: - print("Process is already running. Exiting.") - sys.exit(1) - - camword = parse_cameras(args.cameras) - badcamword = parse_cameras(args.bad_cameras) - - exps_to_ignore = [] - if args.ignore_expid_list is not None: - expids = [int(val) for val in args.ignore_expid_list.split(',')] - exps_to_ignore.extend(expids) - if args.ignore_expid_file is not None and os.path.isfile(args.ignore_expid_file): - expids = get_skipped_ids(args.ignore_expid_file) - exps_to_ignore.extend(expids) - - daily_processing_manager(specprod=args.specprod, exp_table_path=args.exp_table_path, - proc_table_path=args.proc_table_path, path_to_data=args.raw_data_path, - expobstypes=args.exp_obstypes, procobstypes=args.proc_obstypes, - z_submit_types=args.z_submit_types, - tab_filetype=args.table_file_type, camword=camword, - badcamword=badcamword, badamps=args.badamps, queue=args.queue, - dry_run_level=args.dry_run_level, dry_run=args.dry_run, no_redshifts=args.no_redshifts, - override_night=args.override_night, exps_to_ignore=exps_to_ignore, - continue_looping_debug=args.continue_looping_debug, - data_cadence_time=args.data_cadence_time, queue_cadence_time=args.queue_cadence_time, - exp_cadence_time=args.exp_cadence_time, - dont_check_job_outputs=args.dont_check_job_outputs, - dont_resubmit_partial_jobs=args.dont_resubmit_partial_jobs, - use_specter=args.use_specter, - use_tilenight=args.use_tilenight) diff --git a/deprecated/bin/desi_night b/deprecated/bin/desi_night deleted file mode 100755 index 3534bf94a..000000000 --- a/deprecated/bin/desi_night +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -Automate the nightly processing. -""" - -import sys -from desispec.scripts.night import Nightly - -if __name__ == '__main__': - n = Nightly() - sys.stdout.flush() diff --git a/deprecated/bin/desi_pipe b/deprecated/bin/desi_pipe deleted file mode 100755 index 958beea8b..000000000 --- a/deprecated/bin/desi_pipe +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -Interact with a DESI production -""" - -from desispec.scripts.pipe import PipeUI - -if __name__ == '__main__': - p = PipeUI() diff --git a/deprecated/bin/desi_pipe_exec b/deprecated/bin/desi_pipe_exec deleted file mode 100755 index e789ed4ee..000000000 --- a/deprecated/bin/desi_pipe_exec +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -"""Run one or more DESI pipeline tasks of a single type. 
-""" - -import sys -import desispec.scripts.pipe_exec as pipe_exec - -if __name__ == '__main__': - args = pipe_exec.parse() - sys.exit(pipe_exec.main(args)) diff --git a/deprecated/bin/desi_pipe_exec_mpi b/deprecated/bin/desi_pipe_exec_mpi deleted file mode 100755 index b0648f00e..000000000 --- a/deprecated/bin/desi_pipe_exec_mpi +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -"""Use MPI to run one or more DESI pipeline tasks of a single type. -""" - -import sys -from desispec.parallel import use_mpi - -comm = None -rank = 0 -nproc = 1 -if use_mpi(): - from mpi4py import MPI - comm = MPI.COMM_WORLD - rank = comm.rank - nproc = comm.size -else: - print("mpi4py not found, using only one process") - -# FIXME: Can we can do better than this hack? -# Add a random delay before starting to avoid too many processes loading the -# same library files at the same time this is hopefully a temporary hack -# we can have as many as 6000 procs, and we accept to lose at max 1 minute -import time -import numpy.random -numpy.random.seed(rank) -sec = numpy.random.uniform() * 0.01 * nproc -if rank == 0 : - print("Each proc will wait a few seconds before starting, max is " - "{} sec".format(0.01*nproc)) - sys.stdout.flush() -time.sleep(sec) - -import desispec.scripts.pipe_exec as pipe_exec - -if __name__ == '__main__': - args = pipe_exec.parse() - sys.exit(pipe_exec.main(args, comm=comm)) diff --git a/deprecated/bin/desi_pipe_status b/deprecated/bin/desi_pipe_status deleted file mode 100755 index 781862c33..000000000 --- a/deprecated/bin/desi_pipe_status +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -Interactively display the status of a pipeline production. 
-""" - -import sys -import desispec.scripts.pipe_status as pipe_status - -if __name__ == '__main__': - sys.exit(pipe_status.main()) - diff --git a/deprecated/bin/desi_qa_exposure b/deprecated/bin/desi_qa_exposure deleted file mode 100755 index 67cad3218..000000000 --- a/deprecated/bin/desi_qa_exposure +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -This script generates QA related to an Exposure -""" - -import sys -import desispec.scripts.qa_exposure as qa_exposure - - -if __name__ == '__main__': - args = qa_exposure.parse() - sys.exit(qa_exposure.main(args)) diff --git a/deprecated/bin/desi_qa_frame b/deprecated/bin/desi_qa_frame deleted file mode 100755 index 651cd8e5e..000000000 --- a/deprecated/bin/desi_qa_frame +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -This script generates QA related to a Frame -""" - -import sys -import desispec.scripts.qa_frame as qa_frame - -if __name__ == '__main__': - args = qa_frame.parse() - sys.exit(qa_frame.main(args)) diff --git a/deprecated/bin/desi_qa_night b/deprecated/bin/desi_qa_night deleted file mode 100755 index 3d44f4d23..000000000 --- a/deprecated/bin/desi_qa_night +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -This script generates QA related to a night -""" - -import sys -import desispec.scripts.qa_night as qa_night - - -if __name__ == '__main__': - args = qa_night.parse() - sys.exit(qa_night.main(args)) diff --git a/deprecated/bin/desi_qa_prod b/deprecated/bin/desi_qa_prod deleted file mode 100755 index 0f84844d9..000000000 --- a/deprecated/bin/desi_qa_prod +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -This script generates QA related to a production -""" - -import sys -import desispec.scripts.qa_prod as qa_prod - -if __name__ == '__main__': - args = qa_prod.parse() - sys.exit(qa_prod.main(args)) diff --git a/deprecated/bin/desi_qa_skyresid b/deprecated/bin/desi_qa_skyresid deleted file mode 100755 index 091fff022..000000000 --- a/deprecated/bin/desi_qa_skyresid +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- - -""" -This script generates QA related to a production -""" - -import sys -import desispec.scripts.skysubresid as skysubresid - -if __name__ == '__main__': - args = skysubresid.parse() - sys.exit(skysubresid.main(args)) diff --git a/deprecated/bin/desi_quicklook b/deprecated/bin/desi_quicklook deleted file mode 100755 index fc497391c..000000000 --- a/deprecated/bin/desi_quicklook +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python -""" -Run DESI quicklook pipeline on a given exposure -""" - -import sys -from desispec.scripts import quicklook -sys.exit(quicklook.ql_main(quicklook.parse())) diff --git a/deprecated/bin/wrap_desi_night.sh b/deprecated/bin/wrap_desi_night.sh deleted file mode 100755 index 6e6fd4549..000000000 --- a/deprecated/bin/wrap_desi_night.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -#- This script is called by the data transfer daemon via ssh -#- to launch new jobs for each exposure as they arrive. 
- -#- Add module locations that are not included by default when spawning -#- a command via ssh -module use /usr/common/software/modulefiles - -#- define the software environment to use -source /project/projectdirs/desi/software/desi_environment.sh 18.11 - -#- define the production specific environment variables -source /global/cscratch1/sd/desi/desi/spectro/redux/nightly/setup.sh - -#- call desi_night with whatever args were passed in -echo RUNNING desi_night $@ -# echo ' (not really)' -desi_night $@ - -#- debugging; maybe leave on to show what happens? -echo 'Production database tasks:' -desi_pipe top --once -echo 'Batch jobs in the queue:' -squeue -u $USER - diff --git a/deprecated/doc/pipeline.rst b/deprecated/doc/pipeline.rst deleted file mode 100644 index 6be8a6aab..000000000 --- a/deprecated/doc/pipeline.rst +++ /dev/null @@ -1,190 +0,0 @@ -.. _pipeline: - -Pipeline Use -========================= - -The DESI spectroscopic pipeline is used to run real or simulated data through one or more stages of a standard sequence of processing operations. The pipeline is designed to function on a supercomputer (e.g. NERSC) or cluster, but can also run locally for small data tests. - - -Overview ------------------------- - -The starting point of the pipeline is real or simulated raw exposures. These exposures are either arcs, flats, or science exposures. The exposures are grouped by night. Each exposure consists of images from up to 10 spectrographs with 3 cameras (r, b, z) each. The processing "steps" that are defined in the pipeline are: - -* **preproc**: (all exposure types) Apply image pre-processing. -* **psf**: (only for arcs) Estimate the PSF. -* **psfnight**: (one per night, only for arcs) Combine all PSF estimates for the night. -* **traceshift**: (only for flats and science) Compute the trace locations in preparation for extractions. -* **extract**: (only for flats and science) Extract the maximum likelihood spectra from the pixel data. -* **fiberflat**: (only for flats) Compute a fiber flat from an extracted continuum lamp exposure. -* **fiberflatnight**: (one per night, only for flats) Build the nightly fiberflat. -* **sky**: (only for science) Apply the fiberflat to sky fibers to compute the sky model. -* **starfit**: (only for science) For each spectrograph, apply fiberflat and sky subtraction to standards and fit the result to stellar models. -* **fluxcalib**: (only for science) Apply the fiberflat and sky subtraction to science fibers and then calibrate against the stellar fits. -* **cframe**: (only for science) Apply the calibration to the extracted frames. -* **spectra**: The calibrated output science spectra are re-grouped into files based on their sky location (healpix pixels). -* **redshift**: The redshifts are estimated from re-grouped spectral files. - -For a given pipeline "step", there are frequently many independent processing "tasks" that can be batched together. Each processing task usually has some input dependencies (data files) and generates some outputs. In general, a single task has exactly one output file. This allows reconstruction of the state of the processing from examining the filesystem. The pipeline infrastructure is designed to track the dependencies between tasks as well as the current state of each task. When the pipeline actually does the processing, it generates scripts (either slurm scripts for submission to a queueing system or plain bash scripts) that batch together many tasks. 
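-
-The bookkeeping can be sketched in a few lines of Python. This is a toy
-illustration only: the ``deps`` and ``state`` dictionaries are hypothetical
-stand-ins for the production database, not desispec code:
-
-.. code-block:: python
-
-    # Toy model of the task-state bookkeeping described above.
-    deps = {
-        "preproc": [],            # raw data is already on disk
-        "psf": ["preproc"],       # PSF estimation needs preprocessed images
-        "psfnight": ["psf"],      # nightly PSF combines per-exposure PSFs
-    }
-    state = {"preproc": "done", "psf": "waiting", "psfnight": "waiting"}
-
-    def is_ready(task):
-        """A waiting task becomes ready once all its dependencies are done."""
-        return all(state[dep] == "done" for dep in deps[task])
-
-    # Batch every currently-ready task into the next job script.
-    batch = [t for t in state if state[t] == "waiting" and is_ready(t)]
-    print(batch)  # ['psf']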
-
-**Example:** Imagine you had 5 arc exposures for which you wanted to estimate the PSF in one job. Estimating the PSF for one exposure consists of 30 individual tasks (one per spectrograph and camera), so there are 150 tasks in this example. Additionally, each of those tasks can run in parallel using one MPI process per fiber bundle and several threads per process.
-
-For a single set of raw data, we might want to have multiple "data reductions" that use different versions of the pipeline software or use different options for the processing. Each independent reduction of some raw data is called a "production". A "production" on disk consists of a directory hierarchy where the data outputs, logs, and scripts are stored. A database is used to track the dependencies and states of all tasks in a production.
-
-
-User Interface
---------------------
-
-As discussed above, a single data processing "production" essentially consists of a database and a directory structure of job scripts, logs, and output data files. The primary user interface for running the pipeline on a specific production is the `desi_pipe` command line tool. This takes a command followed by command-specific options. If you want to write a custom script which controls the pipeline in a particular way, then you can also call the same high-level interface used by `desi_pipe`. This interface is found in the `desispec.pipeline.control` module.
-
-Command Help
-~~~~~~~~~~~~~~~~~~
-
-An overview of available commands can be displayed with:
-
-.. include:: _static/desi_pipe_help.inc
-
-Creating a Production
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-The first step to using the pipeline is to create a "production" directory for the data processing outputs:
-
-.. include:: _static/desi_pipe_create.inc
-
-Before creating a production you should have on hand some information about the data and tools you want to use:
-
- 1. The location of the raw data.
- 2. The location of the "spectro" directory containing various auxiliary
-    files (this is the location you want to become $DESI_ROOT for the
-    production).
- 3. The location of the "top-level" directory where you want to put your
-    productions.
- 4. The name of your production (which will become a subdirectory).
- 5. The spectro calibration data from an svn checkout.
- 6. The basis templates data from an svn checkout.
-
-Here is an example, using some simulated data from a survey validation data challenge:
-
-.. include:: _static/desi_pipe_create_ex.inc
-
-This creates the production directory and subdirectories for all output data products, considering the raw data that exists at the time you run the command. If you add new raw data to your data directory, see the "update" command below.
-
-Just creating a production does not change anything in your environment, and the pipeline has no idea how many productions you have created. In order to "activate" your production and use it for future desi_pipe commands, you must source the setup.sh file. In the example above, you would now do:
-
-.. code-block:: console
-
-    source ./desi_test/redux/svdc/setup.sh
-
-And now all future commands will use this production.
-
-
-Monitoring a Production
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-For a quick snapshot of the production you can use the "top" command to display updates on the number of tasks in different states. This is refreshed every 10 seconds. For a single snapshot we can use the "--once" option. Building on our example above:
-
-.. include:: _static/desi_pipe_top_ex.inc
-
-Here we see that no tasks have been run yet.
The "preproc" tasks are in the "ready" state (their dependencies are met). The remaining tasks are in the "waiting" state, since their dependencies are not yet complete. - -Whenever a single task runs, it will write a log specific to that task. This file can always be found in the same place within the production directory (run/logs/night/[night]/). If you re-run a specific task (either because it failed or you simpled wanted to run it again), then the per-task log is overwritten in the same location. The pipeline only tracks the current state of a task from its most recent execution, and the per-task log is the output from that most recent run. - -The logs from a "job" (the simultaneous batched execution of many tasks) is stored in a per-job directory located in run/scripts/ and named according to the range of processing steps run in the job, the date and job ID. These logs will contain output about the overall number of tasks that were run, how many tasks succeeded and failed, and any errors due to the scheduling system or runtime environment. A new log directory is created for every job that is submitted. - - -Processing Data with Minimal Interaction -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When doing large-scale processing (or re-processing) of many nights of exposures, it is convenient to have a high-level wrapper that submits many jobs to the queueing system with dependencies between jobs to ensure that the processing happens in the correct sequence. This can be done using the "go" command: - -.. include:: _static/desi_pipe_go.inc - -There are many options to this command that control things like the NERSC sytem to use, the job submission queue, the maximum runtime and number of nodes to use, etc. By default, jobs are submitted to the regular queue with maximum job sizes and run times given by the limits for that queue. Before using non-default values for these at NERSC, you should read and familiarize yourself with the different queues and their limits in the NERSC online documentation. - -If the "--nersc" option is not specified, then bash scripts will be generated. You can use other options to enable the use of MPI in these bash scripts and specify the node sizes and properties. - -Continuing our example, we could submit several jobs to process all tasks on the cori-knl nodes with: - -.. include:: _static/desi_pipe_go_ex.inc - -This will submit 3 jobs per night and a final job to do the spectral regrouping and redshift fitting. If some of these jobs fail for some reason, you can cleanup the production (see the cleanup command below with the "--submitted" option) and then re-run the "go" command with the "--resume" option: - -.. code-block:: console - - $> desi_pipe go --nersc cori-knl --resume - - -Updating a Production -~~~~~~~~~~~~~~~~~~~~~~~~~ - -When new raw data arrives in the input data directory, we must add the processing tasks for this new data to our database. This is done using the "update" command: - -.. include:: _static/desi_pipe_update.inc - -By default, the update command looks across all nights in the raw data. This can be time consuming if you have only added a new night of data or a single exposure. Use the options above to restrict the update to only certain nights or exposures. - - -Cleaning Up When Jobs Fail -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -There will always be cases where jobs submitted to a queuing system on a large supercomputer will fail. 
-This could be due to a problem with the scheduler, a problem with the filesystem that makes jobs take longer and run out of time, etc. During the running of a job, the states of individual **tasks** are updated as they complete. Even when a job dies or is killed, any completed tasks are marked as done. However, tasks that were in a "running" state when the job ended need to be reset into the "ready" state. This is done using the "cleanup" command:
-
-.. include:: _static/desi_pipe_cleanup.inc
-
-You should only run this command if there are no pipeline jobs from the current production running. Additionally, if you are using the "desi_pipe go" command, then tasks already submitted are ignored in future runs. In that case you must use the "--submitted" option to the cleanup command.
-
-
-Manually Running Processing Steps
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Manually running pipeline steps involves first selecting tasks and then running some set of processing steps on these using all the various NERSC queue options.
-
-**TO-DO:** Document the commands for all this, including:
-
-  * tasks
-  * dryrun
-  * check
-  * script
-  * run
-  * chain
-
-When Something Goes Wrong
----------------------------------
-
-If a job dies, even if due to an external system issue, it is always good to look at the job logs and verify that everything went well up to the point that it failed. The job logs are organized in the run/scripts directory and named after the steps being run, the date, and the job ID. For NERSC jobs, you can browse https://my.nersc.gov to get a list of all jobs you have submitted. After verifying that the job ended due to external factors, you can clean up (see above) and retry.
-
-A pipeline job usually runs many individual tasks. Each task can succeed or fail independently. A pipeline job might complete successfully (from the viewpoint of the queueing system) even if some individual tasks fail. If all tasks fail, the job will exit with a non-zero exit code so that future jobs with a dependency hold are not launched.
-
-If you have a job where one or more tasks failed, you should examine the logs for that task. As discussed before, the per-task logs are in run/logs.
-
-In an extreme case where you believe the production database is invalid or corrupted, you can force the re-creation of the database using only the files that exist on disk. Ensure that all jobs are killed and then do:
-
-.. code-block:: console
-
-    $> desi_pipe sync
-
-This scans the outputs of the production and generates a new DB from scratch.
-
-
-Example 1: Large (Re)Processing of Many Exposures
----------------------------------------------------------
-
-Our in-line example in the usage section shows how "desi_pipe go" can be used to submit sets of jobs (3 per night) in a dependency chain and then a final job to do the spectral regrouping and redshift fitting.
-
-
-Example 2: Process One Exposure
---------------------------------------------
-
-
-
-
-Example 3: Nightly Processing
---------------------------------------
-
-TO-DO: Document what happens when the "desi_night" command is triggered by the data transfer.
-
-
-Example 4: Skipping Steps Using External Tools
---------------------------------------------------
-
-If you use some of the DESI "quick" simulation tools to produce uncalibrated frame data (or calibrated frame data) directly, then there is a special step that must be taken. In this scenario, the production database has no idea that you have injected data into the processing chain.
-The only option is to use a recovery step ("desi_pipe sync") which will scan the production output directories and rebuild the database with your injected files included in the dependencies and marked as "done".
diff --git a/deprecated/doc/qa.rst b/deprecated/doc/qa.rst
deleted file mode 100644
index 5734bf7a1..000000000
--- a/deprecated/doc/qa.rst
+++ /dev/null
@@ -1,327 +0,0 @@
-.. _qa:
-
-*****************
-Quality Assurance
-*****************
-
-Overview
-========
-
-The DESI spectroscopic pipeline includes a series of
-routines that monitor the quality of the pipeline products
-and may be used to inspect outputs across exposures, nights,
-or a full production.
-
-Expose QA
-=========
-
-Here is the magic to expose a set of QA products
-made at NERSC to the world:
-
-1. cp -rp QA into www area :: /project/projectdirs/desi/www
-2. fix_permissions.sh -a QA [This may no longer be necessary]
-
-These are then exposed at https://portal.nersc.gov/cfs/desi/rest_of_path
-
-Scripts
-=======
-
-desi_qa_frame
-+++++++++++++
-
-Generate the QA for an input frame file.
-The command can be run from anywhere and the
-output is written to its "proper" location.
-
-usage
------
-
-Here is the usage::
-
-    usage: desi_qa_frame [-h] --frame_file FRAME_FILE [--reduxdir PATH]
-                         [--make_plots]
-
-    Generate Frame Level QA [v0.4.2]
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      --frame_file FRAME_FILE
-                            Frame filename. Full path is not required nor desired.
-      --reduxdir PATH       Override default path ($DESI_SPECTRO_REDUX/$SPECPROD)
-                            to processed data.
-      --make_plots          Generate QA figs too?
-
-
-examples
---------
-
-Generate the QA YAML file::
-
-    desi_qa_frame --frame_file=frame-r7-00000077.fits
-
-Generate the QA YAML file and figures::
-
-    desi_qa_frame --frame_file=frame-r7-00000077.fits --make_plots
-
-desi_qa_exposure
-++++++++++++++++
-
-Generates Exposure level QA. The current
-implementation is only for the flat flux.
-
-usage
------
-
-Here is the usage::
-
-    usage: desi_qa_exposure [-h] --expid EXPID [--qatype QATYPE]
-                            [--channels CHANNELS] [--reduxdir PATH] [--rebuild]
-                            [--qamulti_root QAMULTI_ROOT] [--slurp SLURP]
-
-    Generate Exposure Level QA [v0.5.0]
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      --expid EXPID         Exposure ID
-      --qatype QATYPE       Type of QA to generate [fiberflat, s2n]
-      --channels CHANNELS   List of channels to include. Default = [b,r,z]
-      --reduxdir PATH       Override default path ($DESI_SPECTRO_REDUX/$SPECPROD)
-                            to processed data.
-      --rebuild             Regenerate the QA files for this exposure?
-      --qamulti_root QAMULTI_ROOT
-                            Root name for a set of slurped QA files (e.g.
-                            mini_qa). Uses $SPECPROD/QA for path
-      --slurp SLURP         Root name for slurp QA file to add to (e.g. mini_qa).
-                            Uses $SPECPROD/QA for path
-
-fiberflat
----------
-
-Generate QA on the fiber flat across the exposure for one or more channels::
-
-    desi_qa_exposure --expid=96 --qatype=fiberflat
-
-
-
-desi_qa_skyresid
-++++++++++++++++
-
-This script examines sky subtraction residuals
-for an exposure, night or production.
-
-usage
------
-
-Here is the usage::
-
-    usage: desi_qa_skyresid [-h] [--reduxdir PATH] [--expid EXPID] [--night NIGHT]
-                            [--channels CHANNELS] [--prod] [--gauss]
-                            [--nights NIGHTS]
-
-    Generate QA on Sky Subtraction residuals [v0.4.2]
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      --reduxdir PATH       Override default path ($DESI_SPECTRO_REDUX/$SPECPROD)
-                            to processed data.
-      --expid EXPID        Generate exposure plot on given exposure
-      --night NIGHT        Generate night plot on given night
-      --channels CHANNELS  List of channels to include
-      --prod               Results for full production run
-      --gauss              Explore Gaussianity for full production run
-      --nights NIGHTS      List of nights to limit prod plots
-
-
-Exposure
---------
-
-Generate a plot of the sky subtraction residuals for an
-input Exposure ID. e.g. ::
-
-    desi_qa_skyresid --expid=123
-
-Production
-----------
-
-Generate a plot of the sky subtraction residuals for the
-Production. If reduxdir is not provided, then the script
-will use the $SPECPROD and $DESI_SPECTRO_REDUX environment
-variables. Simply called::
-
-    desi_qa_skyresid --prod
-
-Gaussianity
------------
-
-Examine whether the residuals are distributed
-as Gaussian statistics. Here is an example::
-
-    desi_qa_skyresid --gauss
-
-
-desi_qa_night
-+++++++++++++
-
-This script is used to analyze the QA outputs
-from a given night. Note that we use desi_qa_prod (below)
-to generate the QA YAML files.
-
-usage
------
-
-Here is the usage::
-
-    usage: desi_qa_night [-h] [--expid_series] [--bright_dark BRIGHT_DARK]
-                         [--qaprod_dir QAPROD_DIR] [--specprod_dir SPECPROD_DIR]
-                         [--night NIGHT]
-
-    Generate/Analyze Production Level QA [v0.5.0]
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      --expid_series        Generate exposure series plots.
-      --bright_dark BRIGHT_DARK
-                            Restrict to bright/dark (flag: 0=all; 1=bright;
-                            2=dark; only used in time_series)
-      --qaprod_dir QAPROD_DIR
-                            Path to where QA is generated. Default is qaprod_dir
-      --specprod_dir SPECPROD_DIR
-                            Path to spectro production folder. Default is
-                            specprod_dir
-      --night NIGHT         Night; required
-
-
-Current recommendation
-----------------------
-
-First generate the QA for the given night with desi_qa_prod, e.g.::
-
-    desi_qa_prod --make_frameqa 1 --specprod_dir /global/projecta/projectdirs/desi/spectro/redux/daily --night 20200224 --qaprod_dir /global/projecta/projectdirs/desi/spectro/redux/xavier/daily/QA --slurp
-
-
-Then generate the Night plots::
-
-    desi_qa_night --specprod_dir /global/projecta/projectdirs/desi/spectro/redux/daily --qaprod_dir /global/projecta/projectdirs/desi/spectro/redux/xavier/daily/QA --night 20200224 --expid_series
-
-
-desi_qa_prod
-++++++++++++
-
-This script is used to both generate and analyze the
-QA outputs for a complete production.
-
-usage
------
-
-Here is the usage::
-
-    usage: desi_qa_prod [-h] [--make_frameqa MAKE_FRAMEQA] [--slurp] [--remove]
-                        [--clobber] [--channel_hist CHANNEL_HIST]
-                        [--time_series TIME_SERIES] [--bright_dark BRIGHT_DARK]
-                        [--html] [--qaprod_dir QAPROD_DIR] [--S2N_plot]
-                        [--ZP_plot] [--xaxis XAXIS]
-
-    Generate/Analyze Production Level QA [v0.5.0]
-
-    optional arguments:
-      -h, --help            show this help message and exit
-      --make_frameqa MAKE_FRAMEQA
-                            Bitwise flag to control remaking the QA files (1) and
-                            figures (2) for each frame in the production
-      --slurp               slurp production QA files into one?
-      --remove              remove frame QA files?
-      --clobber             clobber existing QA files?
-      --channel_hist CHANNEL_HIST
-                            Generate channel histogram(s)
-      --time_series TIME_SERIES
-                            Generate time series plot. Input is QATYPE-METRIC,
-                            e.g. SKYSUB-MED_RESID
-      --bright_dark BRIGHT_DARK
-                            Restrict to bright/dark (flag: 0=all; 1=bright;
-                            2=dark; only used in time_series)
-      --html                Generate HTML files?
-      --qaprod_dir QAPROD_DIR
-                            Path to where QA is generated. Default is qaprod_dir
-      --S2N_plot            Generate a S/N plot for the production (vs.
-                            xaxis)
-      --ZP_plot             Generate a ZP plot for the production (vs. xaxis)
-      --xaxis XAXIS         Specify x-axis for S/N and ZP plots
-
-
-frameqa
--------
-
-One generates the frame QA (the YAML and/or figure files)
-with the --make_frameqa flag. These files are created
-in a folder tree QA/ that is parallel to the exposures and
-calib2d folders::
-
-    desi_qa_prod --make_frameqa=1  # Generate all the QA YAML files
-    desi_qa_prod --make_frameqa=2  # Generate all the QA figure files
-    desi_qa_prod --make_frameqa=3  # Generate YAML and figures
-
-The optional --remove and --clobber flags can be used to remove/clobber
-the QA files.
-
-slurp
------
-
-By using the --slurp flag, one collates
-all of the QA outputs::
-
-    desi_qa_prod --slurp           # Collate all the QA YAML files into a series of JSON files, one per night
-    desi_qa_prod --slurp --remove  # Collate and remove the individual files
-
-html
-----
-
-A set of static HTML files that provide simple links
-to the QA figures may be generated::
-
-    desi_qa_prod --html  # Generate HTML files
-
-The top-level QA file (in the QA/ folder) includes any PNG
-files located at the top-level of that folder.
-
-Channel Histograms
-------------------
-
-Using the --channel_hist flag, the script will generate a series
-of histogram plots on default metrics: FIBERFLAT: MAX_RMS,
-SKYSUB: MED_RESID, FLUXCALIB: MAX_ZP_OFF::
-
-    desi_qa_prod --channel_hist
-
-Time Series Plot
-----------------
-
-Using the --time_series input with a *qatype* and *metric* produces
-a Time Series plot of that metric for all nights/exposures/frames
-in the production, by channel, e.g.::
-
-    desi_qa_prod --time_series=SKYSUB-MED_RESID
-    desi_qa_prod --time_series=FLUXCALIB-ZP
-
-By default, these files are placed in the QA/ folder in
-the $DESI_SPECTRO_REDUX/$SPECPROD folder.
-
-S/N Plot
---------
-
-Generate a plot for a standard set of fiducials --
-object type at a given magnitude in a given channel
-(e.g. ELG, 23 mag in channel r). The x-axis is controlled
-by the `--xaxis` option and may be MJD, texp (exposure time),
-or expid. Here is a sample call::
-
-    desi_qa_prod --S2N_plot --xaxis texp
-
-ZP Plot
--------
-
-Similar to the plot above but for the Zero Point
-calculated in the three channels. Again, `--xaxis`
-controls the abscissa. An example::
-
-    desi_qa_prod --ZP_plot --xaxis texp
-
diff --git a/deprecated/py/desispec/io/qa.py b/deprecated/py/desispec/io/qa.py
deleted file mode 100644
index 2d8630526..000000000
--- a/deprecated/py/desispec/io/qa.py
+++ /dev/null
@@ -1,294 +0,0 @@
-"""
-desispec.io.qa
-==============
-
-IO routines for QA.
-""" -from __future__ import print_function, absolute_import, division - -import os, yaml -import json - -from desiutil.io import yamlify - -from desispec.io import findfile, read_meta_frame -from desispec.io.util import makepath -from desiutil.log import get_logger -from .util import checkgzip -# log=get_logger() - - -def qafile_from_framefile(frame_file, qaprod_dir=None, output_dir=None): - """ Derive the QA filename from an input frame file - Args: - frame_file: str - output_dir: str, optional Over-ride default output path - qa_dir: str, optional Over-ride default QA - - Returns: - - """ - frame_file = checkgzip(frame_file) - frame_meta = read_meta_frame(frame_file) - night = frame_meta['NIGHT'].strip() - camera = frame_meta['CAMERA'].strip() - expid = int(frame_meta['EXPID']) - if frame_meta['FLAVOR'] in ['flat', 'arc']: - qatype = 'qa_calib' - else: - qatype = 'qa_data' - # Name - qafile = findfile(qatype, night=night, camera=camera, expid=expid, - outdir=output_dir, qaprod_dir=qaprod_dir) - # Return - return qafile, qatype - - -def read_qa_data(filename): - """Read data from a QA file - """ - # Read yaml - with open(filename, 'r') as infile: - qa_data = yaml.safe_load(infile) - # Convert expid to int - for night in qa_data.keys(): - for expid in list(qa_data[night].keys()): - if isinstance(expid,str): - qa_data[night][int(expid)] = qa_data[night][expid].copy() - qa_data[night].pop(expid) - # Return - return qa_data - - -def read_qa_brick(filename): - """Generate a QA_Brick object from a data file - """ - from desispec.qa.qa_brick import QA_Brick - # Read - qa_data = read_qa_data(filename) - - # Instantiate - qabrick = QA_Brick(in_data=qa_data) - - return qabrick - - -def read_qa_frame(filename): - """Generate a QA_Frame object from a data file - """ - from desispec.qa.qa_frame import QA_Frame - #- check if filename is (night, expid, camera) tuple instead - if not isinstance(filename, str): - night, expid, camera = filename - filename = findfile('qa', night, expid, camera) - - # Read - filename = checkgzip(filename) - qa_data = read_qa_data(filename) - - # Instantiate - qaframe = QA_Frame(qa_data) - - return qaframe - - -def load_qa_frame(filename, frame_meta=None, flavor=None): - """ Load an existing QA_Frame or generate one, as needed - - Args: - filename: str - frame_meta: dict like, optional - flavor: str, optional - Type of QA_Frame - - Returns: - qa_frame: QA_Frame object - """ - from desispec.qa.qa_frame import QA_Frame - log=get_logger() - if os.path.isfile(filename): # Read from file, if it exists - qaframe = read_qa_frame(filename) - log.info("Loaded QA file {:s}".format(filename)) - # Check against frame, if provided - if frame_meta is not None: - for key in ['camera','expid','night','flavor']: - assert str(getattr(qaframe, key)) == str(frame_meta[key.upper()]) - else: # Init - if frame_meta is None: - log.error("QA file {:s} does not exist. Expecting frame input".format(filename)) - qaframe = QA_Frame(frame_meta) - # Set flavor? 
-    if flavor is not None:
-        qaframe.flavor = flavor
-    # Return
-    return qaframe
-
-
-def load_qa_brick(filename):
-    """ Load an existing QA_Brick or generate one, as needed
-
-    Args:
-        filename: str
-
-    Returns:
-        qa_brick: QA_Brick object
-    """
-    from desispec.qa.qa_brick import QA_Brick
-    log = get_logger()
-    if os.path.isfile(filename):  # Read from file, if it exists
-        qabrick = read_qa_brick(filename)
-        log.info("Loaded QA file {:s}".format(filename))
-    else:  # Init
-        qabrick = QA_Brick()
-    # Return
-    return qabrick
-
-def write_qa_brick(outfile, qabrick):
-    """Write QA for a given brick
-
-    Args:
-        outfile : str
-            filename
-        qabrick : QA_Brick object
-            (its data dict of QA info is written)
-    """
-    outfile = makepath(outfile, 'qa')
-
-    # Simple yaml
-    ydict = yamlify(qabrick.data)
-    with open(outfile, 'w') as yamlf:
-        yamlf.write(yaml.dump(ydict))#, default_flow_style=True) )
-
-    return outfile
-
-
-def write_qa_frame(outfile, qaframe, verbose=False):
-    """Write QA for a given frame
-
-    Args:
-        outfile : str
-            filename
-        qaframe : QA_Frame object, with the following attributes
-            qa_data: dict of QA info
-    """
-    log = get_logger()
-    outfile = makepath(outfile, 'qa')
-
-    # Generate the dict
-    odict = {qaframe.night: {qaframe.expid: {qaframe.camera: {}, 'flavor': qaframe.flavor}}}
-    odict[qaframe.night][qaframe.expid][qaframe.camera] = qaframe.qa_data
-    ydict = yamlify(odict)
-    # Simple yaml
-    with open(outfile, 'w') as yamlf:
-        yamlf.write(yaml.dump(ydict))
-    if verbose:
-        log.info("Wrote QA frame file: {:s}".format(outfile))
-
-    return outfile
-
-
-def write_qa_exposure(outroot, qaexp, ret_dict=False):
-    """Write QA for a given exposure
-
-    Args:
-        outroot : str
-            filename without format extension
-        qaexp : QA_Exposure object
-        ret_dict : bool, optional
-            Return dict only? [for qa_prod, mainly]
-
-    Returns:
-        outfile or odict : str or dict
-    """
-    # Generate the dict
-    odict = {qaexp.night: {qaexp.expid: {}}}
-    odict[qaexp.night][qaexp.expid]['flavor'] = qaexp.flavor
-    odict[qaexp.night][qaexp.expid]['meta'] = qaexp.meta
-    cameras = list(qaexp.data['frames'].keys())
-    for camera in cameras:
-        odict[qaexp.night][qaexp.expid][camera] = qaexp.data['frames'][camera]
-    # Return dict only?
-    if ret_dict:
-        return odict
-    # Simple yaml
-    ydict = yamlify(odict)
-    outfile = outroot+'.yaml'
-    outfile = makepath(outfile, 'qa')
-    with open(outfile, 'w') as yamlf:
-        yamlf.write(yaml.dump(ydict))#, default_flow_style=True) )
-
-    return outfile
-
-
-def load_qa_multiexp(inroot):
-    """Load QA for a given production
-
-    Args:
-        inroot : str
-            base filename without format extension
-
-    Returns:
-        odict : dict
-    """
-    log = get_logger()
-    infile = inroot+'.json'
-    log.info("Loading QA prod file: {:s}".format(infile))
-    # Read
-    if not os.path.exists(infile):
-        log.info("QA prod file {:s} does not exist!".format(infile))
-        log.error("You probably need to generate it with desi_qa_prod --make_frameqa=3 --slurp")
-    with open(infile, 'rt') as fh:
-        odict = json.load(fh)
-    # Return
-    return odict
-
-
-def write_qa_multiexp(outroot, mdict, indent=True):
-    """Write QA for a given production
-
-    Args:
-        outroot : str
-            filename without format extension
-        mdict : dict
-
-    Returns:
-        outfile: str
-            output filename
-    """
-    log = get_logger()
-    outfile = outroot+'.json'
-    outfile = makepath(outfile, 'qa')
-
-    ydict = yamlify(mdict)  # This works well for JSON too
-    # Simple json
-    with open(outfile, 'wt') as fh:
-        json.dump(ydict, fh, indent=indent)
-    log.info('Wrote QA Multi-Exposure file: {:s}'.format(outfile))
-
-    return outfile
-
-
-def write_qa_ql(outfile, qaresult):
-    """Write QL output files
-
-    Args:
-        outfile : str
-            filename to be written (yaml)
-        qaresult : dict
-            QAresults from run_qa()
-
-    Returns:
-        outfile : str
-    """
-    #import yaml
-    #from desiutil.io import yamlify
-    # Take in QL input and output to yaml
-    #SE: No yaml creation as of May 2018
-    qadict = yamlify(qaresult)
-    #f=open(outfile,"w")
-    #f.write(yaml.dump(qadict))
-    #f.close()
-
-    g = open(outfile, "w")
-    json.dump(qadict, g, sort_keys=True, indent=4)
-    g.close()
-
-    return outfile
-
-
diff --git a/deprecated/py/desispec/pipeline/__init__.py b/deprecated/py/desispec/pipeline/__init__.py
deleted file mode 100644
index 30d534a76..000000000
--- a/deprecated/py/desispec/pipeline/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-# See top-level LICENSE.rst file for Copyright information
-#
-# -*- coding: utf-8 -*-
-"""
-desispec.pipeline
-=================
-
-Tools for pipeline creation and running.
-"""
-from __future__ import absolute_import, division, print_function
-
-from . import tasks
-
-from .defs import (task_states, prod_options_name,
-                   task_state_to_int, task_int_to_state)
-
-from .db import (all_task_types, DataBaseSqlite, DataBasePostgres, check_tasks,
-                 load_db)
-
-from .prod import (update_prod, load_prod)
-
-from .run import (run_task, run_task_simple, run_task_list, run_task_list_db,
-                  dry_run)
-
-from .scriptgen import (batch_shell, batch_nersc)
diff --git a/deprecated/py/desispec/pipeline/control.py b/deprecated/py/desispec/pipeline/control.py
deleted file mode 100644
index d304a967f..000000000
--- a/deprecated/py/desispec/pipeline/control.py
+++ /dev/null
@@ -1,1331 +0,0 @@
-#
-# See top-level LICENSE.rst file for Copyright information
-#
-# -*- coding: utf-8 -*-
-"""
-desispec.pipeline.control
-=========================
-
-Tools for controlling pipeline production.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-import os
-import sys
-import re
-import time
-
-from collections import OrderedDict
-
-import numpy as np
-
-from desiutil.log import get_logger
-
-from .. import io
-
-from ..parallel import (dist_uniform, dist_discrete, dist_discrete_all,
-                        stdouterr_redirected)
-
-from .defs import (task_states, prod_options_name,
-                   task_state_to_int, task_int_to_state)
-
-from . import prod as pipeprod
-from . import db as pipedb
-from . import run as piperun
-from . import tasks as pipetasks
-from . import scriptgen as scriptgen
-
-
-class clr:
-    HEADER = "\033[95m"
-    OKBLUE = "\033[94m"
-    OKGREEN = "\033[92m"
-    WARNING = "\033[93m"
-    FAIL = "\033[91m"
-    ENDC = "\033[0m"
-    def disable(self):
-        self.HEADER = ""
-        self.OKBLUE = ""
-        self.OKGREEN = ""
-        self.WARNING = ""
-        self.FAIL = ""
-        self.ENDC = ""
-
-
-def create(root=None, data=None, redux=None, prod=None, force=False,
-    basis=None, calib=None, db_sqlite=False, db_sqlite_path=None,
-    db_postgres=False, db_postgres_host="nerscdb03.nersc.gov",
-    db_postgres_port=5432, db_postgres_name="desidev",
-    db_postgres_user="desidev_admin", db_postgres_authorized="desidev_ro",
-    nside=64 ):
-    """Create (or re-create) a production.
-
-    Args:
-        root (str): value to use for DESI_ROOT.
-        data (str): value to use for DESI_SPECTRO_DATA.
-        redux (str): value to use for DESI_SPECTRO_REDUX.
-        prod (str): value to use for SPECPROD.
-        force (bool): if True, overwrite existing production DB.
-        basis (str): value to use for DESI_BASIS_TEMPLATES.
-        calib (str): value to use for DESI_SPECTRO_CALIB.
-        db_sqlite (bool): if True, use SQLite for the DB.
-        db_sqlite_path (str): override path to SQLite DB.
-        db_postgres (bool): if True, use PostgreSQL for the DB.
-        db_postgres_host (str): PostgreSQL hostname.
-        db_postgres_port (int): PostgreSQL connection port number.
-        db_postgres_name (str): PostgreSQL DB name.
-        db_postgres_user (str): PostgreSQL user name.
-        db_postgres_authorized (str): Additional PostgreSQL users to
-            authorize.
-        nside (int): HEALPix nside value used for spectral grouping.
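-
-    Example:
-        An illustrative call (the paths here are hypothetical)::
-
-            create(root="/scratch/desi",
-                   data="/scratch/desi/spectro/data",
-                   redux="/scratch/desi/spectro/redux",
-                   prod="mini", db_sqlite=True)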
- - """ - log = get_logger() - - # Check desi root location - - desiroot = None - if root is not None: - desiroot = os.path.abspath(root) - os.environ["DESI_ROOT"] = desiroot - elif "DESI_ROOT" in os.environ: - desiroot = os.environ["DESI_ROOT"] - else: - log.error("You must set DESI_ROOT in your environment or " - "set the root keyword argument") - raise RuntimeError("Invalid DESI_ROOT") - - # Check raw data location - - rawdir = None - if data is not None: - rawdir = os.path.abspath(data) - os.environ["DESI_SPECTRO_DATA"] = rawdir - elif "DESI_SPECTRO_DATA" in os.environ: - rawdir = os.environ["DESI_SPECTRO_DATA"] - else: - log.error("You must set DESI_SPECTRO_DATA in your environment or " - "set the data keyword argument") - raise RuntimeError("Invalid DESI_SPECTRO_DATA") - - # Check production name - - prodname = None - if prod is not None: - prodname = prod - os.environ["SPECPROD"] = prodname - elif "SPECPROD" in os.environ: - prodname = os.environ["SPECPROD"] - else: - log.error("You must set SPECPROD in your environment or " - "set the prod keyword argument") - raise RuntimeError("Invalid SPECPROD") - - # Check spectro redux location - - specdir = None - if redux is not None: - specdir = os.path.abspath(redux) - os.environ["DESI_SPECTRO_REDUX"] = specdir - elif "DESI_SPECTRO_REDUX" in os.environ: - specdir = os.environ["DESI_SPECTRO_REDUX"] - else: - log.error("You must set DESI_SPECTRO_REDUX in your environment or " - "set the redux keyword argument") - raise RuntimeError("Invalid DESI_SPECTRO_REDUX") - - proddir = os.path.join(specdir, prodname) - if os.path.exists(proddir) and not force : - log.error("Production {} exists.\n" - "Either remove this directory if you want to start fresh\n" - "or use 'desi_pipe update' to update a production\n" - "or rerun with --force option.".format(proddir)) - raise RuntimeError("production already exists") - - # Check basis template location - - if basis is not None: - basis = os.path.abspath(basis) - os.environ["DESI_BASIS_TEMPLATES"] = basis - elif "DESI_BASIS_TEMPLATES" in os.environ: - basis = os.environ["DESI_BASIS_TEMPLATES"] - else: - log.error("You must set DESI_BASIS_TEMPLATES in your environment or " - "set the basis keyword argument") - raise RuntimeError("Invalid DESI_BASIS_TEMPLATES") - - # Check calibration location - - if calib is not None: - calib = os.path.abspath(calib) - os.environ["DESI_SPECTRO_CALIB"] = calib - elif "DESI_SPECTRO_CALIB" in os.environ: - calib = os.environ["DESI_SPECTRO_CALIB"] - else: - log.error("You must set DESI_SPECTRO_CALIB in your environment " - " or set the calib keyword argument") - raise RuntimeError("Invalid DESI_SPECTRO_CALIB") - - # Construct our DB connection string - - dbpath = None - if db_postgres: - # We are creating a new postgres backend. Explicitly create the - # database, so that we can get the schema key. 
- db = pipedb.DataBasePostgres(host=db_postgres_host, - port=db_postgres_port, dbname=db_postgres_name, - user=db_postgres_user, schema=None, - authorize=db_postgres_authorized) - - dbprops = [ - "postgresql", - db_postgres_host, - "{}".format(db_postgres_port), - db_postgres_name, - db_postgres_user, - db.schema - ] - dbpath = ":".join(dbprops) - os.environ["DESI_SPECTRO_DB"] = dbpath - - elif db_sqlite: - # We are creating a new sqlite backend - if db_sqlite_path is not None: - # We are using a non-default path - dbpath = os.path.abspath(db_sqlite_path) - else: - # We are using sqlite with the default location - dbpath = os.path.join(proddir, "desi.db") - if not os.path.isdir(proddir): - os.makedirs(proddir) - - # Create the database - db = pipedb.DataBaseSqlite(dbpath, "w") - - os.environ["DESI_SPECTRO_DB"] = dbpath - - elif "DESI_SPECTRO_DB" in os.environ: - # We are using an existing prod - dbpath = os.environ["DESI_SPECTRO_DB"] - - else: - # Error- we have to get the DB info from somewhere - log.error("You must set DESI_SPECTRO_DB in your environment or " - "use the db_sqlite or db_postgres arguments") - raise RuntimeError("Invalid DESI_SPECTRO_DB") - - pipeprod.update_prod(nightstr=None, hpxnside=nside) - - # create setup shell snippet - - setupfile = os.path.abspath(os.path.join(proddir, "setup.sh")) - with open(setupfile, "w") as s: - s.write("# Generated by desi_pipe\n") - s.write("export DESI_ROOT={}\n\n".format(desiroot)) - s.write("export DESI_BASIS_TEMPLATES={}\n".format(basis)) - s.write("export DESI_SPECTRO_CALIB={}\n\n".format(calib)) - s.write("export DESI_SPECTRO_DATA={}\n\n".format(rawdir)) - s.write("# Production originally created at\n") - s.write("# $DESI_SPECTRO_REDUX={}\n".format(specdir)) - s.write("# $SPECPROD={}\n".format(prodname)) - s.write("#\n") - s.write("# Support the ability to move the production\n") - s.write("# - get abspath to directory where this script is located\n") - s.write("# - unpack proddir=$DESI_SPECTRO_REDUX/$SPECPROD\n\n") - s.write('proddir=$(cd $(dirname "$BASH_SOURCE"); pwd)\n') - s.write("export DESI_SPECTRO_REDUX=$(dirname $proddir)\n") - s.write("export SPECPROD=$(basename $proddir)\n\n") - # s.write("export DESI_SPECTRO_REDUX={}\n".format(specdir)) - # s.write("export SPECPROD={}\n".format(specprod)) - s.write("export DESI_SPECTRO_DB={}\n".format(dbpath)) - s.write("\n") - if "DESI_LOGLEVEL" in os.environ: - s.write("export DESI_LOGLEVEL=\"{}\"\n\n"\ - .format(os.environ["DESI_LOGLEVEL"])) - else: - s.write("#export DESI_LOGLEVEL=\"DEBUG\"\n\n") - - log.info("\n\nTo use this production, you should do:\n%> source {}\n\n"\ - .format(setupfile)) - - return - - -def update(nightstr=None, nside=64, expid=None): - """Update a production. - - Args: - nightstr (str): Comma separated (YYYYMMDD) or regex pattern. Only - nights matching these patterns will be considered. - nside (int): HEALPix nside value used for spectral grouping. - expid (int): Only update the production for a single exposure ID. - - """ - pipeprod.update_prod(nightstr=nightstr, hpxnside=nside, expid=expid) - - return - - -def get_tasks_type(db, tasktype, states, nights, expid=None, spec=None): - """Get tasks of one type that match certain criteria. - - Args: - db (DataBase): the production DB. - tasktype (str): a valid task type. - states (list): list of task states to select. - nights (list): list of nights to select. - expid (int): exposure ID to select. - spec (int): spectrograph to select. - - Returns: - (list): list of tasks meeting the criteria. 
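-
-    Example:
-        An illustrative call (the night and state values here are
-        hypothetical)::
-
-            tasks = get_tasks_type(db, "preproc", ["ready"], ["20200224"])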
- - """ - ntlist = ",".join(nights) - if (expid is not None) and (len(nights) > 1): - raise RuntimeError("Only one night should be specified when " - "getting tasks for a single exposure.") - - tasks = list() - with db.cursor() as cur: - - if tasktype == "spectra" or tasktype == "redshift": - - cmd = "select pixel from healpix_frame where night in ({})".format(ntlist) - cur.execute(cmd) - pixels = np.unique([ x for (x,) in cur.fetchall() ]).tolist() - pixlist = ",".join([ str(p) for p in pixels]) - cmd = "select name,state from {} where pixel in ({})".format(tasktype, pixlist) - cur.execute(cmd) - tasks = [ x for (x, y) in cur.fetchall() if \ - task_int_to_state[y] in states ] - - else : - cmd = "select name, state from {} where night in ({})"\ - .format(tasktype, ntlist) - if expid is not None: - cmd = "{} and expid = {}".format(cmd, expid) - if spec is not None: - cmd = "{} and spec = {}".format(cmd, spec) - cur.execute(cmd) - tasks = [ x for (x, y) in cur.fetchall() if \ - task_int_to_state[y] in states ] - return tasks - - -def get_tasks(db, tasktypes, nights, states=None, expid=None, spec=None, - nosubmitted=False, taskfile=None): - """Get tasks of multiple types that match certain criteria. - - Args: - db (DataBase): the production DB. - tasktypes (list): list of valid task types. - states (list): list of task states to select. - nights (list): list of nights to select. - expid (int): exposure ID to select. - spec (int): spectrograph to select. - nosubmitted (bool): if True, ignore tasks that were already - submitted. - - Returns: - list: all tasks of all types. - - """ - all_tasks = list() - for tt in tasktypes: - tasks = get_tasks_type(db, tt, states, nights, expid=expid, spec=spec) - if nosubmitted: - if (tt != "spectra") and (tt != "redshift"): - sb = db.get_submitted(tasks) - tasks = [ x for x in tasks if not sb[x] ] - all_tasks.extend(tasks) - - return all_tasks - - -def tasks(tasktypes, nightstr=None, states=None, expid=None, spec=None, - nosubmitted=False, db_postgres_user="desidev_ro", taskfile=None): - """Get tasks of multiple types that match certain criteria. - - Args: - tasktypes (list): list of valid task types. - nightstr (list): comma separated (YYYYMMDD) or regex pattern. - states (list): list of task states to select. - expid (int): exposure ID to select. - spec (int): spectrograph to select. - nosubmitted (bool): if True, ignore tasks that were already - submitted. - db_postgres_user (str): If using postgres, connect as this - user for read-only access" - taskfile (str): if set write to this file, else write to STDOUT. - - """ - if states is None: - states = task_states - else: - for s in states: - if s not in task_states: - raise RuntimeError("Task state '{}' is not valid".format(s)) - - dbpath = io.get_pipe_database() - db = pipedb.load_db(dbpath, mode="r", user=db_postgres_user) - - allnights = io.get_nights(strip_path=True) - nights = pipeprod.select_nights(allnights, nightstr) - - ttypes = list() - for tt in pipedb.all_task_types(): - if tt in tasktypes: - ttypes.append(tt) - - all_tasks = get_tasks(db, ttypes, nights, states=states, expid=expid, - spec=spec, nosubmitted=nosubmitted) - - pipeprod.task_write(taskfile, all_tasks) - - return - - -def getready(db, nightstr=None): - """Update forward dependencies in the database. - - Update database for one or more nights to ensure that forward - dependencies know that they are ready to run. - - Args: - db (DataBase): the production DB. - nightstr (list): comma separated (YYYYMMDD) or regex pattern. 
- - """ - allnights = io.get_nights(strip_path=True) - nights = pipeprod.select_nights(allnights, nightstr) - for nt in nights: - db.getready(night=nt) - return - - -def check_tasks(tasks, db=None): - """Check the state of pipeline tasks. - - If the database handle is given, use the DB for checking. Otherwise - use the filesystem. - - Args: - tasks (list): list of tasks to check. - db (DataBase): the database to use. - - Returns: - OrderedDict: dictionary of the state of each task. - - """ - states = pipedb.check_tasks(tasks, db=db) - - tskstate = OrderedDict() - for tsk in tasks: - tskstate[tsk] = states[tsk] - - return tskstate - - -def sync(db, nightstr=None, specdone=False): - """Synchronize DB state based on the filesystem. - - This scans the filesystem for all tasks for the specified nights, - and updates the states accordingly. - - Args: - db (DataBase): the production DB. - nightstr (list): comma separated (YYYYMMDD) or regex pattern. - specdone: If true, set spectra to done if files exist. - """ - allnights = io.get_nights(strip_path=True) - nights = pipeprod.select_nights(allnights, nightstr) - - for nt in nights: - db.sync(nt,specdone=specdone) - return - - -def cleanup(db, tasktypes, failed=False, submitted=False, expid=None): - """Clean up stale tasks in the DB. - - Args: - db (DataBase): the production DB. - tasktypes (list): list of valid task types. - failed (bool): also clear failed states. - submitted (bool): also clear submitted flag. - expid (int): only clean this exposure ID. - - """ - exid = None - if expid is not None and expid >= 0: - exid = expid - - db.cleanup(tasktypes=tasktypes, expid=exid, cleanfailed=failed, - cleansubmitted=submitted) - return - - -def dryrun(tasks, nersc=None, nersc_queue="regular", nersc_maxtime=0, - nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1, mpi_run="", - procs_per_node=0, nodb=False, db_postgres_user="desidev_ro", force=False): - """Print equivalent command line jobs. - - For the specified tasks, print the equivalent stand-alone commands - that would be run on each task. A pipeline job calls the internal - desispec.scripts entrypoints directly. - - Args: - tasks (list): list of tasks to run. - nersc (str): if not None, the name of the nersc machine to use - (cori-haswell | cori-knl). - nersc_queue (str): the name of the queue to use - (regular | debug | realtime). - nersc_maxtime (int): if specified, restrict the runtime to this - number of minutes. - nersc_maxnodes (int): if specified, restrict the job to use this - number of nodes. - nersc_shifter (str): the name of the shifter image to use. - mpi_run (str): if specified, and if not using NERSC, use this - command to launch MPI executables in the shell scripts. Default - is to not use MPI. - mpi_procs (int): if not using NERSC, the number of MPI processes - to use in shell scripts. - procs_per_node (int): if specified, use only this number of - processes per node. Default runs one process per core. - nodb (bool): if True, do not use the production DB. - db_postgres_user (str): If using postgres, connect as this - user for read-only access" - force (bool): if True, print commands for all tasks, not just the ones - in a ready state. 
- - """ - tasks_by_type = pipedb.task_sort(tasks) - - (db, opts) = pipeprod.load_prod("r", user=db_postgres_user) - if nodb: - db = None - - ppn = None - if procs_per_node > 0: - ppn = procs_per_node - - if nersc is None: - # Not running at NERSC - if ppn is None: - ppn = mpi_procs - for tt, tlist in tasks_by_type.items(): - piperun.dry_run(tt, tlist, opts, mpi_procs, - ppn, db=db, launch="mpirun -n", force=force) - else: - # Running at NERSC - hostprops = scriptgen.nersc_machine(nersc, - nersc_queue) - - for tt, tlist in tasks_by_type.items(): - joblist = scriptgen.nersc_job_size(tt, tlist, - nersc, nersc_queue, nersc_maxtime, - nersc_maxnodes, nodeprocs=ppn, db=db) - - launch="srun -n" - for (jobnodes, jobppn, jobtime, jobworkers, jobtasks) in joblist: - jobprocs = jobnodes * jobppn - piperun.dry_run(tt, jobtasks, opts, jobprocs, - jobppn, db=db, launch=launch, force=force) - return - - -def gen_scripts(tasks_by_type, nersc=None, nersc_queue="regular", - nersc_maxtime=0, nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1, - mpi_run="", procs_per_node=0, nodb=False, out=None, debug=False, - db_postgres_user="desidev_ro"): - """Generate scripts to run tasks of one or more types. - - If multiple task type keys are contained in the dictionary, they will - be packed into a single batch job. - - Args: - tasks_by_type (dict): each key is the task type and the value is - a list of tasks. - nersc (str): if not None, the name of the nersc machine to use - (cori-haswell | cori-knl). - nersc_queue (str): the name of the queue to use - (regular | debug | realtime). - nersc_maxtime (int): if specified, restrict the runtime to this - number of minutes. - nersc_maxnodes (int): if specified, restrict the job to use this - number of nodes. - nersc_shifter (str): the name of the shifter image to use. - mpi_run (str): if specified, and if not using NERSC, use this - command to launch MPI executables in the shell scripts. Default - is to not use MPI. - mpi_procs (int): if not using NERSC, the number of MPI processes - to use in shell scripts. - procs_per_node (int): if specified, use only this number of - processes per node. Default runs one process per core. - nodb (bool): if True, do not use the production DB. - out (str): Put task scripts and logs in this directory relative to - the production 'scripts' directory. Default puts task directory - in the main scripts directory. - debug (bool): if True, enable DEBUG log level in generated scripts. 
- db_postgres_user (str): If using postgres, connect as this - user for read-only access" - - Returns: - list: the generated script files - - """ - ttypes = list(tasks_by_type.keys()) - - if len(ttypes)==0 : - return None - - jobname = ttypes[0] - if len(ttypes) > 1: - jobname = "{}-{}".format(ttypes[0], ttypes[-1]) - - proddir = os.path.abspath(io.specprod_root()) - - import datetime - now = datetime.datetime.now() - outtaskdir = "{}_{:%Y%m%d-%H%M%S-%f}".format(jobname, now) - - if out is None: - outdir = os.path.join(proddir, io.get_pipe_rundir(), - io.get_pipe_scriptdir(), outtaskdir) - else: - outdir = os.path.join(proddir, io.get_pipe_rundir(), - io.get_pipe_scriptdir(), out, outtaskdir) - - if not os.path.isdir(outdir): - os.makedirs(outdir) - - mstr = "run" - if nersc is not None: - mstr = nersc - - outscript = os.path.join(outdir, mstr) - outlog = os.path.join(outdir, mstr) - - (db, opts) = pipeprod.load_prod("r", user=db_postgres_user) - if nodb: - db = None - - ppn = None - if procs_per_node > 0: - ppn = procs_per_node - - # FIXME: Add openmp / multiproc function to task classes and - # call them here. - - scripts = None - - if nersc is None: - # Not running at NERSC - scripts = scriptgen.batch_shell(tasks_by_type, - outscript, outlog, mpirun=mpi_run, - mpiprocs=mpi_procs, openmp=1, db=db) - else: - # Running at NERSC - scripts = scriptgen.batch_nersc(tasks_by_type, - outscript, outlog, jobname, nersc, nersc_queue, - nersc_maxtime, nersc_maxnodes, nodeprocs=ppn, - openmp=False, multiproc=False, db=db, - shifterimg=nersc_shifter, debug=debug) - - return scripts - - -def script(taskfile, nersc=None, nersc_queue="regular", - nersc_maxtime=0, nersc_maxnodes=0, nersc_shifter=None, mpi_procs=1, - mpi_run="", procs_per_node=0, nodb=False, out=None, debug=False, - db_postgres_user="desidev_ro"): - """Generate pipeline scripts for a taskfile. - - This gets tasks from the taskfile and sorts them by type. Then it - generates the scripts. - - Args: - taskfile (str): read tasks from this file (if not specified, - read from STDIN). - nersc (str): if not None, the name of the nersc machine to use - (cori-haswell | cori-knl). - nersc_queue (str): the name of the queue to use - (regular | debug | realtime). - nersc_maxtime (int): if specified, restrict the runtime to this - number of minutes. - nersc_maxnodes (int): if specified, restrict the job to use this - number of nodes. - nersc_shifter (str): the name of the shifter image to use. - mpi_run (str): if specified, and if not using NERSC, use this - command to launch MPI executables in the shell scripts. Default - is to not use MPI. - mpi_procs (int): if not using NERSC, the number of MPI processes - to use in shell scripts. - procs_per_node (int): if specified, use only this number of - processes per node. Default runs one process per core. - nodb (bool): if True, do not use the production DB. - out (str): Put task scripts and logs in this directory relative to - the production 'scripts' directory. Default puts task directory - in the main scripts directory. - debug (bool): if True, enable DEBUG log level in generated scripts. 
- db_postgres_user (str): If using postgres, connect as this - user for read-only access. - - Returns: - list: the generated script files - - """ - tasks = pipeprod.task_read(taskfile) - - scripts = list() - if len(tasks) > 0: - tasks_by_type = pipedb.task_sort(tasks) - scripts = gen_scripts( - tasks_by_type, - nersc=nersc, - nersc_queue=nersc_queue, - nersc_maxtime=nersc_maxtime, - nersc_maxnodes=nersc_maxnodes, - nersc_shifter=nersc_shifter, - mpi_procs=mpi_procs, - mpi_run=mpi_run, - procs_per_node=procs_per_node, - nodb=nodb, - out=out, - debug=debug, - db_postgres_user=db_postgres_user) - else: - import warnings - warnings.warn("Input task list is empty", RuntimeWarning) - - return scripts - - -def run_scripts(scripts, deps=None, slurm=False): - """Run job scripts with optional dependencies. - - This either submits the jobs to the scheduler or simply runs them - in order with subprocess. - - Args: - scripts (list): list of pathnames of the scripts to run. - deps (list): optional list of job IDs which are dependencies for - these scripts. - slurm (bool): if True, use slurm to submit the jobs. - - Returns: - list: the job IDs returned by the scheduler. - - """ - import subprocess as sp - - log = get_logger() - - depstr = "" - if deps is not None and len(deps)>0 : - depstr = "-d afterok" - for d in deps: - depstr = "{}:{}".format(depstr, d) - - jobids = list() - if slurm: - # submit each job and collect the job IDs - for scr in scripts: - scom = "sbatch {} {}".format(depstr, scr) - #print("RUN SCRIPTS: {}".format(scom)) - log.debug(time.asctime()) - log.info(scom) - sout = sp.check_output(scom, shell=True, universal_newlines=True) - log.info(sout) - p = sout.split() - jid = re.sub(r'[^\d]', '', p[3]) - jobids.append(jid) - else: - # run the scripts one at a time - for scr in scripts: - rcode = sp.call(scr, shell=True) - if rcode != 0: - log.warning("script {} had return code = {}".format(scr, - rcode)) - return jobids - - -def run(taskfile, nosubmitted=False, depjobs=None, nersc=None, - nersc_queue="regular", nersc_maxtime=0, nersc_maxnodes=0, - nersc_shifter=None, mpi_procs=1, mpi_run="", procs_per_node=0, nodb=False, - out=None, debug=False): - """Create job scripts and run them. - - This gets tasks from the taskfile and sorts them by type. Then it - generates the scripts. Finally, it runs or submits those scripts - to the scheduler. - - Args: - taskfile (str): read tasks from this file (if not specified, - read from STDIN). - nosubmitted (bool): if True, do not run jobs that have already - been submitted. - depjobs (list): list of job ID dependencies. - nersc (str): if not None, the name of the nersc machine to use - (cori-haswell | cori-knl). - nersc_queue (str): the name of the queue to use - (regular | debug | realtime). - nersc_maxtime (int): if specified, restrict the runtime to this - number of minutes. - nersc_maxnodes (int): if specified, restrict the job to use this - number of nodes. - nersc_shifter (str): the name of the shifter image to use. - mpi_run (str): if specified, and if not using NERSC, use this - command to launch MPI executables in the shell scripts. Default - is to not use MPI. - mpi_procs (int): if not using NERSC, the number of MPI processes - to use in shell scripts. - procs_per_node (int): if specified, use only this number of - processes per node. Default runs one process per core. - nodb (bool): if True, do not use the production DB. - out (str): Put task scripts and logs in this directory relative to - the production 'scripts' directory.
Default puts task directory - in the main scripts directory. - debug (bool): if True, enable DEBUG log level in generated scripts. - - Returns: - list: the job IDs returned by the scheduler. - - """ - log = get_logger() - tasks = pipeprod.task_read(taskfile) - - jobids = list() - - if len(tasks) > 0: - tasks_by_type = pipedb.task_sort(tasks) - tasktypes = list(tasks_by_type.keys()) - # We are packing everything into one job - scripts = gen_scripts( - tasks_by_type, - nersc=nersc, - nersc_queue=nersc_queue, - nersc_maxtime=nersc_maxtime, - nersc_maxnodes=nersc_maxnodes, - nersc_shifter=nersc_shifter, - mpi_procs=mpi_procs, - mpi_run=mpi_run, - procs_per_node=procs_per_node, - nodb=nodb, - out=out, - debug=debug) - - log.info("wrote scripts {}".format(scripts)) - - deps = None - slurm = False - if nersc is not None: - slurm = True - if depjobs is not None: - deps = depjobs - - # Run the jobs - if not nodb: - # We can use the DB, mark tasks as submitted. - if slurm: - dbpath = io.get_pipe_database() - db = pipedb.load_db(dbpath, mode="w") - for tt in tasktypes: - if (tt != "spectra") and (tt != "redshift"): - db.set_submitted_type(tt, tasks_by_type[tt]) - - jobids = run_scripts(scripts, deps=deps, slurm=slurm) - else: - import warnings - warnings.warn("Input task list is empty", RuntimeWarning) - - return jobids - - -def chain(tasktypes, nightstr=None, states=None, expid=None, spec=None, - pack=False, nosubmitted=False, depjobs=None, nersc=None, - nersc_queue="regular", nersc_maxtime=0, nersc_maxnodes=0, - nersc_shifter=None, mpi_procs=1, mpi_run="", procs_per_node=0, nodb=False, - out=None, debug=False, dryrun=False): - """Run a chain of jobs for multiple pipeline steps. - - For the list of task types, get all ready tasks meeting the selection - criteria. Then either pack all tasks into one job or submit - each task type as its own job. Input job dependencies can be - specified, and dependencies are tracked between jobs in the chain. - - Args: - tasktypes (list): list of valid task types. - nightstr (str): Comma separated (YYYYMMDD) or regex pattern. Only - nights matching these patterns will be considered. - states (list): list of task states to select. - nights (list): list of nights to select. - expid (int): exposure ID to select. - pack (bool): if True, pack all tasks into a single job. - nosubmitted (bool): if True, do not run jobs that have already - been submitted. - depjobs (list): list of job ID dependencies. - nersc (str): if not None, the name of the nersc machine to use - (cori-haswell | cori-knl). - nersc_queue (str): the name of the queue to use - (regular | debug | realtime). - nersc_maxtime (int): if specified, restrict the runtime to this - number of minutes. - nersc_maxnodes (int): if specified, restrict the job to use this - number of nodes. - nersc_shifter (str): the name of the shifter image to use. - mpi_run (str): if specified, and if not using NERSC, use this - command to launch MPI executables in the shell scripts. Default - is to not use MPI. - mpi_procs (int): if not using NERSC, the number of MPI processes - to use in shell scripts. - procs_per_node (int): if specified, use only this number of - processes per node. Default runs one process per core. - nodb (bool): if True, do not use the production DB. - out (str): Put task scripts and logs in this directory relative to - the production 'scripts' directory. Default puts task directory - in the main scripts directory. - debug (bool): if True, enable DEBUG log level in generated scripts. 
- dryrun (bool): if True, do not submit the jobs. - - Returns: - list: the job IDs from the final step in the chain. - - """ - - log = get_logger() - - machprops = None - if nersc is not None: - machprops = scriptgen.nersc_machine(nersc, nersc_queue) - - if states is None: - states = task_states - else: - for s in states: - if s not in task_states: - raise RuntimeError("Task state '{}' is not valid".format(s)) - - ttypes = list() - for tt in pipetasks.base.default_task_chain: - if tt in tasktypes: - ttypes.append(tt) - - if (machprops is not None) and (not pack): - if len(ttypes) > machprops["submitlimit"]: - log.error("Queue {} on machine {} limited to {} jobs."\ - .format(nersc_queue, nersc, - machprops["submitlimit"])) - log.error("Use a different queue or shorter chains of tasks.") - raise RuntimeError("Too many jobs") - - slurm = False - if nersc is not None: - slurm = True - - dbpath = io.get_pipe_database() - db = pipedb.load_db(dbpath, mode="w") - - allnights = io.get_nights(strip_path=True) - nights = pipeprod.select_nights(allnights, nightstr) - - outdeps = None - indeps = None - if depjobs is not None: - indeps = depjobs - - tasks_by_type = OrderedDict() - - for tt in ttypes: - # Get the tasks. We select by state and submitted status. - tasks = get_tasks_type(db, tt, states, nights, expid=expid, spec=spec) - #print("CHAIN: ", tt, tasks) - if nosubmitted: - if (tt != "spectra") and (tt != "redshift"): - sb = db.get_submitted(tasks) - tasks = [ x for x in tasks if not sb[x] ] - #print("CHAIN: nosubmitted: ", tt, tasks) - - if len(tasks) == 0: - import warnings - warnings.warn("Input task list for '{}' is empty".format(tt), - RuntimeWarning) - continue # might be tasks to do in other ttype - tasks_by_type[tt] = tasks - - scripts = None - tscripts = None - if pack: - # We are packing everything into one job - scripts = gen_scripts( - tasks_by_type, - nersc=nersc, - nersc_queue=nersc_queue, - nersc_maxtime=nersc_maxtime, - nersc_maxnodes=nersc_maxnodes, - nersc_shifter=nersc_shifter, - mpi_procs=mpi_procs, - mpi_run=mpi_run, - procs_per_node=procs_per_node, - nodb=nodb, - out=out, - debug=debug) - if scripts is not None and len(scripts)>0 : - log.info("wrote scripts {}".format(scripts)) - else: - # Generate individual scripts - tscripts = dict() - for tt in ttypes: - onetype = OrderedDict() - onetype[tt] = tasks_by_type[tt] - tscripts[tt] = gen_scripts( - onetype, - nersc=nersc, - nersc_queue=nersc_queue, - nersc_maxtime=nersc_maxtime, - nersc_maxnodes=nersc_maxnodes, - nersc_shifter=nersc_shifter, - mpi_procs=mpi_procs, - mpi_run=mpi_run, - procs_per_node=procs_per_node, - nodb=nodb, - out=out, - debug=debug) - if tscripts[tt] is not None : - log.info("wrote script {}".format(tscripts[tt])) - - if dryrun : - log.warning("dry run: do not submit the jobs") - return None - - # Run the jobs - if slurm: - for tt in ttypes: - if (tt != "spectra") and (tt != "redshift"): - if tt in tasks_by_type.keys() : - db.set_submitted_type(tt, tasks_by_type[tt]) - - outdeps = None - if pack: - # Submit one job - if scripts is not None and len(scripts)>0 : - outdeps = run_scripts(scripts, deps=indeps, slurm=slurm) - else: - # Loop over task types submitting jobs and tracking dependencies. 
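# An illustrative sketch of what the loop below does when slurm=True and
# pack=False (job IDs and script names are hypothetical): each task type
# is submitted with run_scripts(), and the job IDs it returns become the
# "-d afterok:<id>" dependencies of the next task type, e.g.
#   sbatch psf_20200315.slurm                    -> job 1001
#   sbatch -d afterok:1001 psfnight_20200315.slurm -> job 1002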
- for tt in ttypes: - if tscripts[tt] is not None : - outdeps = run_scripts(tscripts[tt], deps=indeps, - slurm=slurm) - if outdeps is not None and len(outdeps) > 0: - indeps = outdeps - else: - indeps = None - - return outdeps - - -def status_color(state): - col = clr.ENDC - if state == "done": - col = clr.OKGREEN - elif state == "running": - col = clr.WARNING - elif state == "failed": - col = clr.FAIL - elif state == "ready": - col = clr.OKBLUE - return col - - -def status_task(task, ttype, state, logdir): - fields = pipetasks.base.task_classes[ttype].name_split(task) - tasklog = None - if "night" in fields: - tasklogdir = os.path.join( - logdir, io.get_pipe_nightdir(), - "{:08d}".format(fields["night"]) - ) - tasklog = os.path.join( - tasklogdir, - "{}.log".format(task) - ) - elif "pixel" in fields: - tasklogdir = os.path.join( - logdir, "healpix", - io.healpix_subdirectory(fields["nside"],fields["pixel"]) - ) - tasklog = os.path.join( - tasklogdir, - "{}.log".format(task) - ) - col = status_color(state) - print("Task {}".format(task)) - print( - "State = {}{}{}".format( - col, - state, - clr.ENDC - ) - ) - if os.path.isfile(tasklog): - print("Dumping task log {}".format(tasklog)) - print("=========== Begin Log =============") - print("") - with open(tasklog, "r") as f: - logdata = f.read() - print(logdata) - print("") - print("============ End Log ==============") - print("", flush=True) - else: - print("Task log {} does not exist".format(tasklog), flush=True) - return - - -def status_taskname(tsklist): - for tsk in tsklist: - st = tsk[1] - col = status_color(st) - print( - " {:20s}: {}{}{}".format(tsk[0], col, st, clr.ENDC), - flush=True - ) - - -def status_night_totals(tasktypes, nights, tasks, tskstates): - # Accumulate totals for each night and type - sep = "------------------+---------+---------+---------+---------+---------+" - ntlist = list() - nighttot = OrderedDict() - for tt in tasktypes: - if tt == "spectra" or tt == "redshift": - # This function only prints nightly tasks - continue - for tsk in tasks[tt]: - fields = pipetasks.base.task_classes[tt].name_split(tsk) - nt = fields["night"] - if nt not in nighttot: - nighttot[nt] = OrderedDict() - if tt not in nighttot[nt]: - nighttot[nt][tt] = OrderedDict() - for s in task_states: - nighttot[nt][tt][s] = 0 - st = tskstates[tt][tsk] - nighttot[nt][tt][st] += 1 - for nt, ttstates in nighttot.items(): - ntstr = "{:08d}".format(nt) - if ntstr in nights: - ntlist.append(nt) - ntlist = list(sorted(ntlist)) - for nt in ntlist: - ttstates = nighttot[nt] - ntstr = "{:08d}".format(nt) - if ntstr in nights: - header = "{:18s}|".format(ntstr) - for s in task_states: - col = status_color(s) - header = "{} {}{:8s}{}|".format( - header, col, s, clr.ENDC - ) - print(sep) - print(header) - print(sep) - for tt, totst in ttstates.items(): - line = " {:16s}|".format(tt) - for s in task_states: - line = "{}{:9d}|".format(line, totst[s]) - print(line) - print("", flush=True) - - -def status_pixel_totals(tasktypes, tasks, tskstates): - # Accumulate totals for each type - sep = "------------------+---------+---------+---------+---------+---------+" - pixtot = OrderedDict() - for tt in tasktypes: - if (tt != "spectra") and (tt != "redshift"): - # This function only prints pixel tasks - continue - for tsk in tasks[tt]: - if tt not in pixtot: - pixtot[tt] = OrderedDict() - for s in task_states: - pixtot[tt][s] = 0 - st = tskstates[tt][tsk] - pixtot[tt][st] += 1 - header = "{:18s}|".format("Pixel Tasks") - for s in task_states: - col = status_color(s) - 
header = "{} {}{:8s}{}|".format( - header, col, s, clr.ENDC - ) - print(sep) - print(header) - print(sep) - for tt, totst in pixtot.items(): - line = " {:16s}|".format(tt) - for s in task_states: - line = "{}{:9d}|".format(line, totst[s]) - print(line) - print("", flush=True) - - -def status_night_tasks(tasktypes, nights, tasks, tskstates): - # Sort the tasks into nights - nighttasks = OrderedDict() - ntlist = list() - for tt in tasktypes: - if tt == "spectra" or tt == "redshift": - # This function only prints nightly tasks - continue - for tsk in tasks[tt]: - fields = pipetasks.base.task_classes[tt].name_split(tsk) - nt = fields["night"] - if nt not in nighttasks: - nighttasks[nt] = list() - nighttasks[nt].append((tsk, tskstates[tt][tsk])) - for nt, tsklist in nighttasks.items(): - ntstr = "{:08d}".format(nt) - if ntstr in nights: - ntlist.append(nt) - ntlist = list(sorted(ntlist)) - for nt in ntlist: - tsklist = nighttasks[nt] - ntstr = "{:08d}".format(nt) - if ntstr in nights: - print(nt) - status_taskname(tsklist) - - -def status_pixel_tasks(tasktypes, tasks, tskstates): - for tt in tasktypes: - tsklist = list() - if (tt != "spectra") and (tt != "redshift"): - # This function only prints pixel tasks - continue - for tsk in tasks[tt]: - tsklist.append((tsk, tskstates[tt][tsk])) - print(tt) - status_taskname(tsklist) - - -def status_summary(tasktypes, nights, tasks, tskstates): - sep = "----------------+---------+---------+---------+---------+---------+" - hline = "-----------------------------------------------" - print(sep) - header_state = "{:16s}|".format(" Task Type") - for s in task_states: - col = status_color(s) - header_state = "{} {}{:8s}{}|".format( - header_state, col, s, clr.ENDC - ) - print(header_state) - print(sep) - for tt in tasktypes: - line = "{:16s}|".format(tt) - for s in task_states: - tsum = np.sum( - np.array( - [1 for x, y in tskstates[tt].items() if y == s], - dtype=np.int32 - ) - ) - line = "{}{:9d}|".format(line, tsum) - print(line, flush=True) - - -def status(task=None, tasktypes=None, nightstr=None, states=None, - expid=None, spec=None, db_postgres_user="desidev_ro"): - """Check the status of pipeline tasks. 
- - - Args: - task (str): if given, only check this single task, printing - its state and log. - tasktypes (list): if given, only consider these task types. - nightstr (str): comma separated (YYYYMMDD) or regex pattern; only - nights matching these patterns will be considered. - states (list): list of task states to select. - expid (int): exposure ID to select. - spec (int): spectrograph to select. - db_postgres_user (str): If using postgres, connect as this - user for read-only access. - - Returns: - None - - """ - dbpath = io.get_pipe_database() - db = pipedb.load_db(dbpath, mode="r", user=db_postgres_user) - - rundir = io.get_pipe_rundir() - logdir = os.path.join(rundir, io.get_pipe_logdir()) - - tasks = OrderedDict() - - summary = False - if (tasktypes is None) and (nightstr is None): - summary = True - - if task is None: - ttypes = None - if tasktypes is not None: - ttypes = list() - for tt in pipetasks.base.default_task_chain: - if tt in tasktypes: - ttypes.append(tt) - else: - ttypes = list(pipetasks.base.default_task_chain) - - if states is None: - states = task_states - else: - for s in states: - if s not in task_states: - raise RuntimeError("Task state '{}' is not valid".format(s)) - - allnights = io.get_nights(strip_path=True) - nights = pipeprod.select_nights(allnights, nightstr) - - for tt in ttypes: - tasks[tt] = get_tasks( - db, [tt], nights, states=states, expid=expid, spec=spec - ) - else: - ttypes = [pipetasks.base.task_type(task)] - tasks[ttypes[0]] = [task] - - tstates = OrderedDict() - for typ, tsks in tasks.items(): - tstates[typ] = pipedb.check_tasks(tsks, db=db) - - if len(ttypes) == 1 and len(tasks[ttypes[0]]) == 1: - # Print status of this specific task - thistype = ttypes[0] - thistask = tasks[thistype][0] - status_task(thistask, thistype, tstates[thistype][thistask], logdir) - else: - if len(ttypes) > 1 and len(nights) > 1: - # We have multiple nights and multiple task types. - # Just print totals. - if summary: - status_summary(ttypes, nights, tasks, tstates) - else: - status_night_totals(ttypes, nights, tasks, tstates) - status_pixel_totals(ttypes, tasks, tstates) - elif len(ttypes) > 1: - # Multiple task types for one night. Print the totals for each - # task type. - thisnight = nights[0] - status_night_totals(ttypes, nights, tasks, tstates) - elif len(nights) > 1: - # We have just one task type, print the state totals for each night - # OR the full task list for redshift or spectra tasks. - thistype = ttypes[0] - print("Task type {}".format(thistype)) - if thistype == "spectra" or thistype == "redshift": - status_pixel_tasks(ttypes, tasks, tstates) - else: - status_night_totals(ttypes, nights, tasks, tstates) - else: - # We have one type and one night, print the full state of every - # task. - thistype = ttypes[0] - thisnight = nights[0] - print("Task type {}".format(thistype)) - status_night_tasks(ttypes, nights, tasks, tstates) - status_pixel_tasks(ttypes, tasks, tstates) - - return diff --git a/deprecated/py/desispec/pipeline/db.py b/deprecated/py/desispec/pipeline/db.py deleted file mode 100644 index b34f38aa5..000000000 --- a/deprecated/py/desispec/pipeline/db.py +++ /dev/null @@ -1,1319 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.db -==================== - -Pipeline processing database -""" - -from __future__ import absolute_import, division, print_function - -import os - -import re -from collections import OrderedDict - -from contextlib import contextmanager - -import numpy as np - -from desiutil.log import get_logger - -from .. import io - -import fitsio - -from .defs import (task_states, task_int_to_state, task_state_to_int, task_name_sep) - - -def all_task_types(): - """Get the list of possible task types that are supported. - - Returns: - list: The list of supported task types. - - """ - from .
import tasks - from .tasks.base import default_task_chain - ttypes = ["fibermap", "rawdata"] - ttypes.extend(tasks.base.default_task_chain) - # Insert qadata after cframe - idx = ttypes.index('cframe') - ttypes.insert(idx+1, 'qadata') - return ttypes - - -def task_sort(tasks): - """Sort a list of tasks by type. - - This takes a list of arbitrary tasks and sorts them by type. The result - is placed in an ordered dictionary of lists in run order. - - Args: - tasks (list): the list of input tasks. - - Returns: - (OrderedDict): ordered dictionary of tasks sorted by type. - - """ - from .tasks.base import task_classes, task_type - sort = dict() - ttypes = all_task_types() - for tp in ttypes: - sort[tp] = list() - - for tsk in tasks: - sort[task_type(tsk)].append(tsk) - - ret = OrderedDict() - for tp in ttypes: - if len(sort[tp]) > 0: - ret[tp] = sort[tp] - return ret - - -def all_tasks(night, nside, expid=None): - """Get all possible tasks for a single night. - - This uses the filesystem to query the raw data for a particular night and - returns a dictionary containing all possible tasks for each task type. For - objects which span multiple nights (e.g. spectra, redrock), this returns the - tasks which are touched by the given night. - - Args: - night (str): The night to scan for tasks. - nside (int): The HEALPix NSIDE value to use. - expid (int): Only get tasks for this single exposure. - - Returns: - dict: a dictionary whose keys are the task types and where each value - is a list of task properties. - - """ - import desimodel.footprint - - log = get_logger() - - log.debug("io.get_exposures night={}".format(night)) - - expids = io.get_exposures(night, raw=True) - - full = dict() - for t in all_task_types(): - full[t] = list() - - healpix_frames = [] - - if expid is not None: - if expid not in expids: - raise RuntimeError("exposure ID {} not valid for night {}"\ - .format(expid, night)) - expids = [ expid ] - - for ex in sorted(expids): - - # get the fibermap for this exposure - fibermap = io.get_raw_files("fibermap", night, ex) - - log.debug("read {}".format(fibermap)) - - fmdata = io.read_fibermap(fibermap) - header = fmdata.meta - - # fmdata, header = fitsio.read(fibermap, 'FIBERMAP', header=True) - flavor = header["FLAVOR"].strip().lower() - if flavor not in ["arc","flat","science"] : - log.error("Do not know what to do with fibermap flavor '{}' for file '{}'".format(flavor,fibermap)) - raise ValueError("Do not know what to do with fibermap flavor '{}' for file '{}'".format(flavor,fibermap)) - - fmpix = dict() - if (flavor != "arc") and (flavor != "flat"): - # This will be used to track which healpix pixels are - # touched by fibers from each spectrograph.
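# Illustrative note: desimodel.footprint.radec2pix maps fiber sky
# coordinates to HEALPix pixel indices, e.g.
#   pix = desimodel.footprint.radec2pix(nside, ra, dec)
# which is how each (night, expid, spectrograph) frame below gets
# associated with the set of pixels its targets land in.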
- ra = np.array(fmdata["TARGET_RA"], dtype=np.float64) - dec = np.array(fmdata["TARGET_DEC"], dtype=np.float64) - - # rm NaN (possible depending on versions of fiberassign) - valid_coordinates = (np.isnan(ra)==False)&(np.isnan(dec)==False) - - for spectro in np.unique( fmdata["SPECTROID"] ) : - ii=np.where(fmdata["SPECTROID"][valid_coordinates]==spectro)[0] - if ii.size == 0 : continue - pixels = desimodel.footprint.radec2pix(nside, ra[valid_coordinates][ii], dec[valid_coordinates][ii]) - for pixel in np.unique(pixels) : - props = dict() - props["night"] = int(night) - props["expid"] = int(ex) - props["spec"] = spectro - props["nside"] = nside - props["pixel"] = pixel - props["ntargets"] = np.sum(pixels==pixel) - healpix_frames.append(props) - # all spectro at once - pixels = np.unique(desimodel.footprint.radec2pix(nside, ra[valid_coordinates], dec[valid_coordinates])) - for pixel in pixels : - props = dict() - props["pixel"] = pixel - props["nside"] = nside - props["state"] = "waiting" - exists=False - for entry in full["spectra"] : - if entry["pixel"]==props["pixel"] : - exists=True - break - if not exists : full["spectra"].append(props) - exists=False - for entry in full["redshift"] : - if entry["pixel"]==props["pixel"] : - exists=True - break - if not exists : full["redshift"].append(props) - - fmprops = dict() - fmprops["night"] = int(night) - fmprops["expid"] = int(ex) - fmprops["flavor"] = flavor - fmprops["state"] = "done" - - full["fibermap"].append(fmprops) - - rdprops = dict() - rdprops["night"] = int(night) - rdprops["expid"] = int(ex) - rdprops["flavor"] = flavor - rdprops["state"] = "done" - - full["rawdata"].append(rdprops) - - # Add the preprocessed pixel files - for band in ['b', 'r', 'z']: - # need to open the rawdata file to see how many spectros - # and cameras are there - for spec in np.unique( fmdata["SPECTROID"] ) : - pixprops = dict() - pixprops["night"] = int(night) - pixprops["band"] = band - pixprops["spec"] = spec - pixprops["expid"] = int(ex) - pixprops["flavor"] = flavor - pixprops["state"] = "ready" - full["preproc"].append(pixprops) - - if flavor == "arc" : - # Add the PSF files - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["expid"] = int(ex) - props["state"] = "waiting" # see defs.task_states - full["psf"].append(props) - - # Add a PSF night file if does not exist - exists=False - for entry in full["psfnight"] : - if entry["night"]==props["night"] \ - and entry["band"]==props["band"] \ - and entry["spec"]==props["spec"] : - exists=True - break - if not exists : - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["state"] = "waiting" # see defs.task_states - full["psfnight"].append(props) - - if flavor != "arc" : - # Add extractions - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["expid"] = int(ex) - props["state"] = "waiting" # see defs.task_states - - # Add traceshift - full["traceshift"].append(props) - - # Add extractions - full["extract"].append(props) - - if flavor == "flat" : - # Add a fiberflat task - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["expid"] = int(ex) - props["state"] = "waiting" # see defs.task_states - full["fiberflat"].append(props) - # Add a fiberflat night file if does not exist - exists=False - for entry in full["fiberflatnight"] : - if entry["night"]==props["night"] \ - and entry["band"]==props["band"] \ - and 
entry["spec"]==props["spec"] : - exists=True - break - if not exists : - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["state"] = "waiting" # see defs.task_states - full["fiberflatnight"].append(props) - - if flavor != "arc" and flavor != "flat": - # Add sky - props = dict() - props["night"] = int(night) - props["band"] = band - props["spec"] = spec - props["expid"] = int(ex) - props["state"] = "waiting" # see defs.task_states - full["sky"].append(props) - # Add fluxcalib - full["fluxcalib"].append(props) - # Add cframe - full["cframe"].append(props) - # Add QA - full["qadata"].append(props) - - # Add starfit if does not exist - exists=False - for entry in full["starfit"] : - if entry["night"]==props["night"] \ - and entry["expid"]==props["expid"] \ - and entry["spec"]==props["spec"] : - exists=True - break - if not exists : - props = dict() - props["night"] = int(night) - props["expid"] = int(ex) - props["spec"] = spec - props["state"] = "waiting" # see defs.task_states - full["starfit"].append(props) - - log.debug("done") - return full , healpix_frames - - -def check_tasks(tasklist, db=None, inputs=None): - """Check a list of tasks and return their state. - - If the database is specified, it is used to check the state of the tasks - and their dependencies. Otherwise the filesystem is checked. - - Args: - tasklist (list): list of tasks. - db (pipeline.db.DB): The optional database to use. - inputs (dict): optional dictionary containing the only input - dependencies that should be considered. - - Returns: - dict: The current state of all tasks. - - """ - from .tasks.base import task_classes, task_type - states = dict() - - if db is None: - # Check the filesystem to see which tasks are done. Since we don't - # have a DB, we can only distinguish between "waiting", "ready", and - # "done" states. - for tsk in tasklist: - tasktype = task_type(tsk) - st = "waiting" - - # Check dependencies - deps = task_classes[tasktype].deps(tsk, db=db, inputs=inputs) - - if len(deps)==0 : - # do not set state to ready of tasks with 0 dependencies - ready = False - else : - ready = True - for k, v in deps.items(): - if not isinstance(v, list): - v = [ v ] - for dp in v: - deptype = task_type(dp) - depfiles = task_classes[deptype].paths(dp) - for odep in depfiles: - if not os.path.isfile(odep): - ready = False - break - if ready: - st = "ready" - - done = True - # Check outputs - outfiles = task_classes[tasktype].paths(tsk) - for out in outfiles: - if not os.path.isfile(out): - done = False - break - if done: - st = "done" - - states[tsk] = st - else: - states = db.get_states(tasklist) - - return states - - -class DataBase: - """Class for tracking pipeline processing objects and state. - """ - def __init__(self): - self._conn = None - return - - - def get_states_type(self, tasktype, tasks): - """Efficiently get the state of many tasks of a single type. - - Args: - tasktype (str): the type of these tasks. - tasks (list): list of task names. - - Returns: - dict: the state of each task. 
- - """ - states = None - namelist = ",".join([ "'{}'".format(x) for x in tasks ]) - - log = get_logger() - log.debug("opening db") - - with self.cursor() as cur: - log.debug("selecting in db") - cur.execute(\ - 'select name, state from {} where name in ({})'.format(tasktype, - namelist)) - st = cur.fetchall() - log.debug("done") - states = { x[0] : task_int_to_state[x[1]] for x in st } - return states - - - def count_task_states(self, tasktype): - """Return a dictionary of how many tasks are in each state - - Args: - tasktype (str): the type of these tasks. - - Returns: - dict: keyed by state, values are number of tasks in that state0 - """ - state_count = OrderedDict() - for state in task_states: - state_count[state] = 0 - - with self.cursor() as cur: - cur.execute( 'select name, state from {}'.format(tasktype)) - for name, intstate in cur.fetchall(): - state_count[task_int_to_state[intstate]] += 1 - - return state_count - - - def get_states(self, tasks): - """Efficiently get the state of many tasks at once. - - Args: - tasks (list): list of task names. - - Returns: - dict: the state of each task. - - """ - from .tasks.base import task_classes, task_type - - # Sort by type - taskbytype = task_sort(tasks) - - # Get state of each type - states = dict() - for t, tlist in taskbytype.items(): - states.update(self.get_states_type(t, tlist)) - - return states - - - def set_states_type(self, tasktype, tasks, postprocessing=True): - """Efficiently get the state of many tasks of a single type. - - Args: - tasktype (str): the type of these tasks. - tasks (list): list of tuples containing the task name and the - state to set. - - Returns: - Nothing. - - """ - from .tasks.base import task_classes - - log = get_logger() - log.debug("opening db") - - with self.cursor() as cur: - log.debug("updating in db") - for tsk in tasks: - cur.execute("update {} set state = {} where name = '{}'".format(tasktype, task_state_to_int[tsk[1]], tsk[0])) - if postprocessing and tsk[1]=="done" : - task_classes[tasktype].postprocessing(db=self,name=tsk[0],cur=cur) - log.debug("done") - return - - - def set_states(self, tasks): - """Efficiently set the state of many tasks at once. - - Args: - tasks (list): list of tuples containing the task name and the - state to set. - - Returns: - Nothing. - - """ - from .tasks.base import task_classes, task_type - # First find the type of each task. - ttypes = dict() - for tsk in tasks: - ttypes[tsk[0]] = task_type(tsk[0]) - - # Sort tasks into types - taskbytype = dict() - for t in all_task_types(): - taskbytype[t] = list() - for tsk in tasks: - taskbytype[ttypes[tsk[0]]].append(tsk) - - # Process each type - for t, tlist in taskbytype.items(): - if len(tlist) > 0: - self.set_states_type(t, tlist) - return - - - def get_submitted(self, tasks): - """Return the submitted flag for the list of tasks. - - Args: - tasks (list): list of task names. - - Returns: - (dict): the boolean submitted state of each task (True means that - the task has been submitted). 
- - """ - from .tasks.base import task_type - # Sort by type - taskbytype = task_sort(tasks) - - # Process each type - submitted = dict() - for t, tlist in taskbytype.items(): - if (t == "spectra") or (t == "redshift"): - raise RuntimeError("spectra and redshift tasks do not have submitted flag.") - namelist = ",".join([ "'{}'".format(x) for x in tlist ]) - with self.cursor() as cur: - cur.execute(\ - 'select name, submitted from {} where name in ({})'.format(t, namelist)) - sb = cur.fetchall() - submitted.update({ x[0] : x[1] for x in sb }) - return submitted - - - def set_submitted_type(self, tasktype, tasks, unset=False): - """Flag a list of tasks of a single type as submitted. - - Args: - tasktype (str): the type of these tasks. - tasks (list): list of task names. - unset (bool): if True, invert the behavior and unset the submitted - flag for these tasks. - - Returns: - Nothing. - - """ - val = 1 - if unset: - val = 0 - with self.cursor() as cur: - for tsk in tasks: - cur.execute("update {} set submitted = {} where name = '{}'".format(tasktype, val, tsk)) - return - - - def set_submitted(self, tasks, unset=False): - """Flag a list of tasks as submitted. - - Args: - tasks (list): list of task names. - unset (bool): if True, invert the behavior and unset the submitted - flag for these tasks. - - Returns: - Nothing. - - """ - from .tasks.base import task_type - # Sort by type - taskbytype = task_sort(tasks) - - # Process each type - for t, tlist in taskbytype.items(): - if (t == "spectra") or (t == "redshift"): - raise RuntimeError("spectra and redshift tasks do not have submitted flag.") - self.set_submitted_type(tlist, unset=unset) - return - - - def update(self, night, nside, expid=None): - """Update DB based on raw data. - - This will use the usual io.meta functions to find raw exposures. For - each exposure, the fibermap and all following objects will be added to - the DB. - - Args: - night (str): The night to scan for updates. - nside (int): The current NSIDE value used for pixel grouping. - expid (int): Only update the DB for this exposure. - - """ - from .tasks.base import task_classes, task_type - - log = get_logger() - - alltasks, healpix_frames = all_tasks(night, nside, expid=expid) - - with self.cursor() as cur: - # insert or ignore all healpix_frames - log.debug("updating healpix_frame ...") - for entry in healpix_frames: - # see if we already have this entry - cmd = "select exists(select 1 from healpix_frame where (expid = {} and spec = {} and nside = {} and pixel = {} ))".format(entry["expid"], entry["spec"], entry["nside"], entry["pixel"]) - cur.execute(cmd) - have_row = cur.fetchone()[0] - - if not have_row: - cur.execute("insert into healpix_frame (night,expid,spec,nside,pixel,ntargets,state) values({},{},{},{},{},{},{})".format(entry["night"],entry["expid"],entry["spec"],entry["nside"],entry["pixel"],entry["ntargets"],0)) - - # read what is already in db - tasks_in_db = {} - for tt in all_task_types(): - cur.execute("select name from {}".format(tt)) - tasks_in_db[tt] = [ x for (x, ) in cur.fetchall()] - - for tt in all_task_types(): - log.debug("updating {} ...".format(tt)) - for tsk in alltasks[tt]: - tname = task_classes[tt].name_join(tsk) - if tname not in tasks_in_db[tt] : - log.debug("adding {}".format(tname)) - task_classes[tt].insert(cur, tsk) - - return - - - def sync(self, night, specdone=False): - """Update states of tasks based on filesystem. - - Go through all tasks in the DB for the given night and determine their - state on the filesystem. 
Then update the DB state to match. - - Args: - night (str): The night to scan for updates. - specdone: If true, set spectra to done if files exist. - """ - from .tasks.base import task_classes - log = get_logger() - - # Get the list of task types excluding spectra and redshifts, - # which will be handled separately. - ttypes = [ t for t in all_task_types() if (t != "spectra") \ - and (t != "redshift") ] - - tasks_in_db = None - # Grab existing nightly tasks - with self.cursor() as cur: - tasks_in_db = {} - for tt in ttypes: - cur.execute("select name from {} where night = {}"\ - .format(tt, night)) - tasks_in_db[tt] = [ x for (x, ) in cur.fetchall() ] - - # For each task type, check status WITHOUT the DB, then set state. - # Save out the cframe states for later use with the healpix_frame table - cfstates = None - for tt in ttypes: - tstates = check_tasks(tasks_in_db[tt], db=None) - st = [ (x, tstates[x]) for x in tasks_in_db[tt] ] - self.set_states_type(tt, st) - if tt == "cframe": - cfstates = tstates.copy() - - # Now examine the spectra and redshift files. If the files exist, - # we assume they are done and completely up to date. If the files - # are not up to date, they must be manually deleted in order for the - # sync to correctly reconstruct the database state. - - pixrows = self.select_healpix_frame({"night" : night}) - # First check the existence of the files touched by this night - spec_exists = dict() - red_exists = dict() - for row in pixrows: - if row["pixel"] in spec_exists: - continue - spec_name = task_classes["spectra"].name_join(row) - red_name = task_classes["redshift"].name_join(row) - - # Check spectra outputs - outfiles = task_classes["spectra"].paths(spec_name) - spec_exists[row["pixel"]] = True - for out in outfiles: - if not os.path.isfile(out): - spec_exists[row["pixel"]] = False - break - - # Check redshift outputs - outfiles = task_classes["redshift"].paths(red_name) - red_exists[row["pixel"]] = True - for out in outfiles: - if not os.path.isfile(out): - red_exists[row["pixel"]] = False - break - - # Now use all this info. 
Some internal helpers to avoid code - # duplication - def set_hpx_frame_0(row, spec, red, cur): - self.update_healpix_frame_state(row, 0, cur) - task_classes["spectra"].state_set( - self, spec, "waiting", cur) - task_classes["redshift"].state_set( - self, red, "waiting", cur) - return - - def set_hpx_frame_1(row, spec, red, cur): - self.update_healpix_frame_state(row, 1, cur) - # getready() will do this for us: - #task_classes["spectra"].state_set( - # self, spec, "ready", cur) - task_classes["redshift"].state_set( - self, red, "waiting", cur) - return - - def set_hpx_frame_2(row, spec, red, cur): - self.update_healpix_frame_state(row, 2, cur) - task_classes["spectra"].state_set( - self, spec, "done", cur) - # getready() will do this: - #task_classes["redshift"].state_set( - # self, red, "ready", cur) - return - - def set_hpx_frame_3(row, spec, red, cur): - self.update_healpix_frame_state(row, 3, cur) - task_classes["spectra"].state_set( - self, spec, "done", cur) - task_classes["redshift"].state_set( - self, red, "done", cur) - return - - with self.cursor() as cur: - for row in pixrows: - cfdone = True - cfprops = row.copy() - for band in ["b", "r", "z"]: - cfprops["band"] = band - cf_name = task_classes["cframe"].name_join(cfprops) - if cfstates[cf_name] != "done": - cfdone = False - - spec_name = task_classes["spectra"].name_join(row) - red_name = task_classes["redshift"].name_join(row) - - if (not cfdone) and (not specdone) : - # The cframes do not exist, so reset the state of the - # spectra and redshift tasks. - set_hpx_frame_0(row, spec_name, red_name, cur) - else: - # The cframe exists... - if spec_exists[row["pixel"]]: - if red_exists[row["pixel"]]: - # We are all done (state 3) - set_hpx_frame_3(row, spec_name, red_name, cur) - else: - # We are only at state 2 - set_hpx_frame_2(row, spec_name, red_name, cur) - else: - # We are just at state 1 - set_hpx_frame_1(row, spec_name, red_name, cur) - - # Update ready state of tasks - self.getready(night=night) - - return - - - def cleanup(self, tasktypes=None, expid=None, cleanfailed=False, - cleansubmitted=False): - """Reset states of tasks. - - Any tasks that are marked as "running" will have their - state reset to "waiting", and getready() will then promote them - back to "ready" where their dependencies are satisfied. This can - be called if a job dies before - completing all tasks. - - Args: - tasktypes (list): if not None, clean up only tasks of these types. - expid (int): if not None, only clean tasks related to this - exposure ID. Note that tasks which are independent of - an expid (psfnight, fiberflatnight, spectra, redshift) - will be ignored if this option is given. - cleanfailed (bool): if True, also reset failed tasks to ready. - cleansubmitted (bool): if True, set submitted flag to False. - - """ - tasks_running = None - - alltypes = all_task_types() - ttypes = None - if tasktypes is None: - ttypes = alltypes - else: - for tt in tasktypes: - if tt not in alltypes: - raise RuntimeError("Cannot clean invalid task type {}"\ - .format(tt)) - ttypes = tasktypes - - # Grab existing nightly tasks - with self.cursor() as cur: - tasks_running = {} - for tt in ttypes: - hasexpid = (tt not in ["psfnight", "fiberflatnight", "spectra", - "redshift"]) - if hasexpid: - # This task type has an expid property. - cmd = None - if expid is not None: - # We are cleaning only a single exposure. - cmd = "select name from {} where expid = {} and ( state = {}".format(tt, expid, task_state_to_int["running"]) - else: - # We are cleaning all exposures for this task type.
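# For example, with tt = "psf" and cleanfailed = True, the command
# assembled below becomes (state integers from defs.py:
# running = 2, failed = 4):
#   select name from psf where ( state = 2 or state = 4 )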
- cmd = "select name from {} where ( state = {}".format(tt, task_state_to_int["running"]) - if cleanfailed: - cmd = "{} or state = {} )".format(cmd, - task_state_to_int["failed"]) - else: - cmd = "{} )".format(cmd) - cur.execute(cmd) - tasks_running[tt] = [ x for (x, ) in cur.fetchall() ] - if cleansubmitted: - if expid is not None: - cmd = "update {} set submitted = 0 where expid = {}".format(tt, expid) - else: - cmd = "update {} set submitted = 0".format(tt) - cur.execute(cmd) - else: - # This task type has no concept of an exposure ID - if expid is not None: - # We specified an exposure ID, which makes no sense - # for this task type. Skip it. - tasks_running[tt] = list() - continue - else: - # cleanup this task type. - cmd = "select name from {} where ( state = {}".format(tt, task_state_to_int["running"]) - if cleanfailed: - cmd = "{} or state = {} )".format(cmd, - task_state_to_int["failed"]) - else: - cmd = "{} )".format(cmd) - cur.execute(cmd) - tasks_running[tt] = [ x for (x, ) in cur.fetchall() ] - if cleansubmitted: - if (tt != "spectra") and (tt != "redshift"): - cmd = "update {} set submitted = 0".format(tt) - cur.execute(cmd) - - for tt in ttypes: - if len(tasks_running[tt]) > 0: - st = [ (x, "waiting") for x in tasks_running[tt] ] - self.set_states_type(tt, st) - - self.getready() - - return - - - def getready(self, night=None): - """Update DB, changing waiting to ready depending on status of dependencies . - - Args: - night (str): The night to process. - - """ - from .tasks.base import task_classes, task_type - log = get_logger() - - # Get the list of task types excluding spectra and redshifts, - # which will be handled separately. - ttypes = [ t for t in all_task_types() if (t != "spectra") \ - and (t != "redshift") ] - - with self.cursor() as cur: - for tt in ttypes: - # for each type of task, get the list of tasks in waiting mode - cmd = "select name from {} where state = {}".format(tt, task_state_to_int["waiting"]) - if night is not None: - cmd = "{} and night = {}".format(cmd, night) - cur.execute(cmd) - tasks = [ x for (x, ) in cur.fetchall()] - if len(tasks) > 0: - log.debug("checking {} {} tasks ...".format(len(tasks),tt)) - for tsk in tasks: - task_classes[tt].getready(db=self, name=tsk, cur=cur) - - for tt in [ "spectra" , "redshift" ]: - if tt == "spectra": - required_healpix_frame_state = 1 - # means we have a cframe - elif tt == "redshift": - required_healpix_frame_state = 2 - # means we have an updated spectra file - - cur.execute('select nside,pixel from healpix_frame where state = {}'.format(required_healpix_frame_state)) - entries = cur.fetchall() - for entry in entries : - log.debug("{} of pixel {} is ready to run".format(tt,entry[1])) - cur.execute('update {} set state = {} where nside = {} and pixel = {}'.format(tt,task_state_to_int["ready"],entry[0],entry[1])) - - log.debug("checking waiting {} tasks to see if they are done...".format(tt)) - cmd = "select pixel from {} where state = {}".format(tt, task_state_to_int["waiting"]) - cur.execute(cmd) - pixels = [ x for (x, ) in cur.fetchall()] - if len(pixels) > 0: - log.debug("checking {} {} ...".format(len(pixels),tt)) - if tt == "spectra": - required_healpix_frame_state = 2 - elif tt == "redshift": - required_healpix_frame_state = 3 - for pixel in pixels: - cur.execute('select pixel from healpix_frame where pixel = {} and state != {}'.format(pixel,required_healpix_frame_state)) - entries = cur.fetchall() - if len(entries)==0 : - log.debug("{} task of pixel {} is done".format(tt,pixel)) - 
cur.execute('update {} set state = {} where pixel = {}'.format(tt,task_state_to_int["done"],pixel)) - return - - - def update_healpix_frame_state(self, props, state, cur): - if "expid" in props : - # update from a cframe - cmd = "update healpix_frame set state = {} where expid = {} and spec = {} and state = {}".format(state,props["expid"],props["spec"],props["state"]) - else : - # update from a spectra or redshift task - cmd = "update healpix_frame set state = {} where nside = {} and pixel = {} and state = {}".format(state,props["nside"],props["pixel"],props["state"]) - - if cur is None : - with self.cursor() as cur: - cur.execute(cmd) - else : - cur.execute(cmd) - return - - - def select_healpix_frame(self, props): - res = [] - with self.cursor() as cur: - cmd = "select * from healpix_frame where " - first=True - for k in props.keys() : - if not first : cmd += " and " - first=False - cmd += "{}={}".format(k,props[k]) - cur.execute(cmd) - entries = cur.fetchall() - # convert that to list of dictionaries - for entry in entries : - tmp = dict() - for i, k in enumerate(["night", "expid", "spec", "nside", - "pixel", "ntargets", "state"]): - tmp[k] = entry[i] - res.append(tmp) - return res - - - def create_healpix_frame_table(self) : - with self.cursor() as cur: - cmd = "create table healpix_frame (night integer, expid integer, spec integer, nside integer, pixel integer, ntargets integer, state integer, unique(expid, spec, nside, pixel))" - cur.execute(cmd) - - return - - -class DataBaseSqlite(DataBase): - """Pipeline database using sqlite3 as the backend. - - Args: - path (str): the filesystem path of the database to open. If None, then - a temporary database is created in memory. - mode (str): if "r", the database is open in read-only mode. If "w", - the database is open in read-write mode and created if necessary. 
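    Example:
        A minimal sketch (the path is hypothetical)::

            db = DataBaseSqlite("/path/to/prod/desi.db", "w")
            with db.cursor() as cur:
                cur.execute("select name, state from fiberflat")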
- - """ - def __init__(self, path, mode): - super(DataBaseSqlite, self).__init__() - - self._path = path - self._mode = mode - - create = True - if (self._path is not None) and os.path.exists(self._path): - create = False - - if self._mode == 'r' and create: - raise RuntimeError("cannot open a non-existent DB in read-only " - " mode") - - self._connstr = None - - # This timeout is in seconds - self._busytime = 1000 - - # Journaling options - self._journalmode = "persist" - self._syncmode = "normal" - - if create: - self.initdb() - return - - - def _open(self): - import sqlite3 - - if self._path is None: - # We are opening an in-memory DB - self._conn = sqlite3.connect(":memory:") - else: - try: - # only python3 supports uri option - if self._mode == 'r': - self._connstr = 'file:{}?mode=ro'.format(self._path) - else: - self._connstr = 'file:{}?mode=rwc'.format(self._path) - self._conn = sqlite3.connect(self._connstr, uri=True, - timeout=self._busytime) - except: - self._conn = sqlite3.connect(self._path, timeout=self._busytime) - if self._mode == 'w': - # In read-write mode, set the journaling - self._conn.execute("pragma journal_mode={}"\ - .format(self._journalmode)) - self._conn.execute("pragma synchronous={}".format(self._syncmode)) - # Other tuning options - self._conn.execute("pragma temp_store=memory") - self._conn.execute("pragma page_size=4096") - self._conn.execute("pragma cache_size=4000") - return - - - def _close(self): - del self._conn - self._conn = None - return - - - @contextmanager - def cursor(self): - import sqlite3 - self._open() - cur = self._conn.cursor() - cur.execute("begin transaction") - try: - yield cur - except sqlite3.DatabaseError as err: - log = get_logger() - log.error(err) - cur.execute("rollback") - raise err - else: - try: - cur.execute("commit") - except sqlite3.OperationalError: - #- sqlite3 in py3.5 can't commit a read-only finished transaction - pass - finally: - del cur - self._close() - - - def initdb(self): - """Create DB tables for all tasks if they do not exist. - """ - # check existing tables - tables_in_db = None - with self.cursor() as cur: - cur.execute("select name FROM sqlite_master WHERE type='table'") - tables_in_db = [x for (x, ) in cur.fetchall()] - - # Create a table for every task type - from .tasks.base import task_classes, task_type - for tt, tc in task_classes.items(): - if tt not in tables_in_db: - tc.create(self) - - if "healpix_frame" not in tables_in_db: - self.create_healpix_frame_table() - return - - -class DataBasePostgres(DataBase): - """Pipeline database using PostgreSQL as the backend. - - Args: - host (str): The database server. - port (int): The connection port. - dbname (str): The database to connect. - user (str): The user name for the connection. The password should be - stored in the ~/.pgpass file. - schema (str): The schema within the database. If this is specified, - then the database is assumed to exist. Otherwise the schema is - computed from a hash of the production location and will be - created. - authorize (str): If creating the schema, this is the list of - additional roles that should be granted access. 
- - """ - def __init__(self, host, port, dbname, user, schema=None, authorize=None): - super(DataBasePostgres, self).__init__() - - self._schema = schema - self._user = user - self._dbname = dbname - self._host = host - self._port = port - self._authorize = authorize - - self._proddir = os.path.abspath(io.specprod_root()) - - create = False - if self._schema is None: - create = True - self._schema = self._compute_schema() - - if create: - self.initdb() - return - - - def _compute_schema(self): - import hashlib - md = hashlib.md5() - md.update(self._proddir.encode()) - return "pipe_{}".format(md.hexdigest()) - - - def _open(self): - import psycopg2 as pg2 - import time - import numpy.random - - # Open connection. If psycopg2 raises an exception, then sleep - # for a random time interval and keep trying. - maxtry = 10 - ntry = 0 - while True: - try: - self._conn = pg2.connect(host=self._host, port=self._port, - user=self._user, dbname=self._dbname) - except pg2.OperationalError as err: - log = get_logger() - log.debug("PostgreSQL connection failed with '{}', will sleep and retry".format(err)) - if ntry > maxtry: - log.error(err) - break - numpy.random.seed(int(time.time())) - sec = numpy.random.uniform() * 3.0 - time.sleep(sec) - ntry += 1 - else: - break - - return - - - def _close(self): - del self._conn - self._conn = None - return - - - @property - def schema(self): - return self._schema - - - def _have_schema(self, cur): - com = "select exists(select 1 from pg_namespace where nspname = '{}')".format(self._schema) - cur.execute(com) - return cur.fetchone()[0] - - - @contextmanager - def cursor(self, skipcheck=False): - import psycopg2 - self._open() - cur = self._conn.cursor() - if not skipcheck: - have_schema = self._have_schema(cur) - if not have_schema: - raise RuntimeError("Postgres schema for production {} does" - " not exist. Make sure you create the production with" - " postgres options and source the top-level setup.sh" - " file.".format(self._proddir)) - cur.execute("set search_path to '{}'".format(self._schema)) - cur.execute("begin transaction") - try: - yield cur - except psycopg2.DatabaseError as err: - log = get_logger() - log.error(err) - cur.execute("rollback") - raise err - else: - cur.execute("commit") - finally: - del cur - self._close() - - - def initdb(self): - """Create DB tables for all tasks if they do not exist. - """ - log = get_logger() - # Check existence of the schema. If we were not passed the schema - # in the constructor, it means that we are creating a new prod, so any - # existing schema should be wiped and recreated. - tables_in_db = None - with self.cursor(skipcheck=True) as cur: - # See if our schema already exists... 
- have_schema = self._have_schema(cur) - if have_schema: - # We need to wipe it first - com = "drop schema {} cascade".format(self._schema) - log.debug(com) - cur.execute(com) - com = "create schema {} authorization {}"\ - .format(self._schema, self._user) - log.debug(com) - cur.execute(com) - - if self._authorize is not None: - com = "grant usage on schema {} to {}"\ - .format(self._schema, self._authorize) - log.debug(com) - cur.execute(com) - - com = "alter default privileges in schema {} grant select on tables to {}".format(self._schema, self._authorize) - log.debug(com) - cur.execute(com) - - com = "alter default privileges in schema {} grant select,usage on sequences to {}".format(self._schema, self._authorize) - log.debug(com) - cur.execute(com) - - com = "alter default privileges in schema {} grant execute on functions to {}".format(self._schema, self._authorize) - log.debug(com) - cur.execute(com) - - com = "alter default privileges in schema {} grant usage on types to {}".format(self._schema, self._authorize) - log.debug(com) - cur.execute(com) - - # Create a table of information about this prod - com = "create table {}.info (key text unique, val text)"\ - .format(self._schema) - log.debug(com) - cur.execute(com) - com = "insert into {}.info values ('{}', '{}')"\ - .format(self._schema, "path", self._proddir) - log.debug(com) - cur.execute(com) - if 'USER' in os.environ: - com = "insert into {}.info values ('{}', '{}')"\ - .format(self._schema, "created_by", os.environ['USER']) - log.debug(com) - cur.execute(com) - - # check existing tables - cur.execute("select tablename from pg_tables where schemaname = '{}'".format(self.schema)) - tables_in_db = [x for (x, ) in cur.fetchall()] - - # Create a table for every task type - from .tasks.base import task_classes, task_type - for tt, tc in task_classes.items(): - if tt not in tables_in_db: - tc.create(self) - - if "healpix_frame" not in tables_in_db: - self.create_healpix_frame_table() - - return - - -def load_db(dbstring, mode="w", user=None): - """Load a database from a connection string. - - This instantiates either an sqlite or postgresql database using a string. - If this string begins with "postgresql:", then it is taken to be the - information needed to connect to a postgres server. Otherwise it is - assumed to be a filesystem path to use with sqlite. The mode is only - meaningful when using sqlite. Postgres permissions are controlled through - the user permissions. - - Args: - dbstring (str): either a filesystem path (sqlite) or a colon-separated - string of connection properties in the form - "postgresql:<host>:<port>:<dbname>:<user>[:<schema>]". - mode (str): for sqlite, the mode. - user (str): for postgresql, an alternate user name for opening the DB. - This can be used to connect as a user with read-only access. - - Returns: - DataBase: a derived database class of the appropriate type. - - """ - if re.search(r"postgresql:", dbstring) is not None: - props = dbstring.split(":") - host = props[1] - port = int(props[2]) - dbname = props[3] - username = props[4] - if user is not None: - username = user - schema = None - if len(props) > 5: - # Our DB string also contains the name of an existing
- schema = props[5] - return DataBasePostgres(host=host, port=port, dbname=dbname, - user=username, schema=schema) - else: - return DataBaseSqlite(dbstring, mode) diff --git a/deprecated/py/desispec/pipeline/defs.py b/deprecated/py/desispec/pipeline/defs.py deleted file mode 100644 index bd7425b91..000000000 --- a/deprecated/py/desispec/pipeline/defs.py +++ /dev/null @@ -1,55 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.defs -====================== - -Common definitions needed by pipeline modules. -""" - -from __future__ import absolute_import, division, print_function - - -task_states = [ - "waiting", - "ready", - "running", - "done", - "failed" -] -"""The valid states of each pipeline task.""" - -task_state_to_int = { - "waiting" : 0, - "ready" : 1, - "running" : 2, - "done" : 3, - "failed" : 4 -} - -task_int_to_state = { - 0 : "waiting", - 1 : "ready", - 2 : "running", - 3 : "done", - 4 : "failed" -} - - -state_colors = { - "waiting": "#000000", - "ready" : "#0000ff", - "running": "#ffff00", - "done": "#00ff00", - "failed": "#ff0000", -} -"""State colors used for visualization.""" - - -task_name_sep = "_" -"""The separator string used for building object names.""" - -prod_options_name = "options.yaml" -"""The name of the options file inside the run directory.""" diff --git a/deprecated/py/desispec/pipeline/plan.py b/deprecated/py/desispec/pipeline/plan.py deleted file mode 100644 index d0cef97d6..000000000 --- a/deprecated/py/desispec/pipeline/plan.py +++ /dev/null @@ -1,492 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.plan -====================== - -Tools for planning pipeline runs. -""" - -from __future__ import absolute_import, division, print_function - -import os -import stat -import sys -import re - -import numpy as np - -from desiutil.log import get_logger - -from .. import io - -from ..parallel import (dist_uniform, dist_discrete, dist_discrete_all, - weighted_partition, stdouterr_redirected, use_mpi) - -from .prod import task_read, task_write - - -def nersc_machine(name, queue): - """Return the properties of the specified NERSC host. - - Args: - name (str): the name of the host. Allowed values are: cori-haswell - and cori-knl. - queue (str): the queue on the machine (regular, debug, etc) - - Returns: - dict: properties of this machine. 
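# The state tables in defs.py above are exact inverses of each other; a quick
# self-contained check of that invariant (values copied from the module):
task_state_to_int = {"waiting": 0, "ready": 1, "running": 2,
                     "done": 3, "failed": 4}
task_int_to_state = {value: state for state, value in task_state_to_int.items()}

assert all(task_int_to_state[task_state_to_int[state]] == state
           for state in task_state_to_int)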
- - """ - props = dict() - if name == "cori-haswell": - props["sbatch"] = [ - "#SBATCH --constraint=haswell" - ] - props["nodecores"] = 32 - props["corecpus"] = 2 - props["nodemem"] = 125.0 - props["timefactor"] = 1.0 - props["startup"] = 2.0 - if queue == "debug": - props["maxnodes"] = 64 - props["maxtime"] = 30 - props["submitlimit"] = 5 - props["sbatch"].append("#SBATCH --partition=debug") - elif queue == "regular": - props["maxnodes"] = 512 - props["maxtime"] = 12 * 60 - props["submitlimit"] = 5000 - props["sbatch"].append("#SBATCH --partition=regular") - elif queue == "realtime": - props["maxnodes"] = 10 - props["maxtime"] = 720 - props["submitlimit"] = 5000 - props["sbatch"].append("#SBATCH --exclusive") - props["sbatch"].append("#SBATCH --qos=realtime") - else: - raise RuntimeError("Unknown {} queue '{}'".format(name, queue)) - elif name == "cori-knl": - props["sbatch"] = [ - "#SBATCH --constraint=knl,quad,cache", - "#SBATCH --core-spec=4" - ] - props["nodecores"] = 64 - props["corecpus"] = 4 - props["nodemem"] = 93.0 - props["timefactor"] = 3.0 - props["startup"] = 2.0 - if queue == "debug": - props["maxnodes"] = 512 - props["maxtime"] = 30 - props["submitlimit"] = 5 - props["sbatch"].append("#SBATCH --partition=debug") - elif queue == "regular": - props["maxnodes"] = 4096 - props["maxtime"] = 12 * 60 - props["submitlimit"] = 5000 - props["sbatch"].append("#SBATCH --partition=regular") - else: - raise RuntimeError("Unknown {} queue '{}'".format(name, queue)) - else: - raise RuntimeError("Unknown machine '{}' choice is 'cori-haswell' or 'cori-knl'".format(name)) - - return props - - -def compute_nodes(nworker, taskproc, nodeprocs): - """Compute number of nodes for the number of workers. - - Args: - nworker (int): The number of workers. - taskproc (int): The number of processes per task. - nodeprocs (int): The number of processes per node. - - Returns: - (int): The number of required nodes. - - """ - nds = (nworker * taskproc) // nodeprocs - if nds * nodeprocs < nworker * taskproc: - nds += 1 - return nds - - -def worker_times(tasktimes, workerdist, startup=0.0): - """Compute the time needed for each worker. - - Args: - tasktimes (array): array of individual task times. - workerdist (list): List of tuples of indices in taskstimes. - startup (float): Startup overhead in minutes for each worker. - - Returns: - (tuple): The (worker times, min, max). - - Notes / Examples: - len(tasktimes) = number of tasks - len(workerdist) = number of workers - workerdist[i] = tuple of tasktime indices assigned to worker i - sum(tasktimes[workerdist[i]]) = expected total time for worker i - """ - tasktimes = np.asarray(tasktimes) - workertimes = np.array([startup + np.sum(tasktimes[ii]) for ii in workerdist]) - workermax = np.max(workertimes) - workermin = np.min(workertimes) - return workertimes, workermin, workermax - - -def compute_worker_tasks(tasktype, tasklist, tfactor, nworker, - workersize, startup=0.0, db=None, num_nodes=None): - """Compute the distribution of tasks for specified workers. - - Args: - tasktype (str): The task type. - tasklist (list): List of tasks, all of type tasktype. - tfactor (float): Additional runtime scaling factor. - nworker (int): The number of workers. - workersize (int): The number of processes in each worker. - startup (float, optional): Startup overhead in minutes for each worker. - db (DataBase, optional): the database to pass to the task runtime - calculation. 
- num_nodes (int, optional): number of nodes over which the workers are distributed - - Returns: - (tuple): The (sorted tasks, sorted runtime weights, dist) results - where dist is the a list of tuples (one per worker) indicating - the indices of tasks for that worker in the - returned sorted list of tasks. - - """ - from .tasks.base import task_classes, task_type - log = get_logger() - - # Run times for each task at this concurrency - tasktimes = [(x, tfactor * task_classes[tasktype].run_time( - x, workersize, db=db)) for x in tasklist] - - # Sort the tasks by runtime to improve the partitioning - # NOTE: sorting is unnecessary when using weighted_partition instead of - # dist_discrete_all, but leaving for now while comparing/debugging - tasktimes = list(sorted(tasktimes, key=lambda x: x[1]))[::-1] - mintasktime = tasktimes[-1][1] - maxtasktime = tasktimes[0][1] - log.debug("task runtime range = {:.2f} ... {:.2f}".format(mintasktime, maxtasktime)) - - # Split the task names and times - worktasks = [x[0] for x in tasktimes] - workweights = [x[1] for x in tasktimes] - - # Distribute tasks - workdist = None - if len(workweights) == nworker: - # One task per worker - workdist = [[i,] for i in range(nworker)] - else: - # workdist = dist_discrete_all(workweights, nworker) - if num_nodes is not None: - workers_per_node = (nworker + num_nodes - 1 ) // num_nodes - else: - workers_per_node = None - - workdist = weighted_partition(workweights, nworker, - groups_per_node=workers_per_node) - - # Find the runtime for each worker - workertimes, workermin, workermax = worker_times( - workweights, workdist, startup=startup) - - log.debug("worker task assignment:") - log.debug(" 0: {:.2f} minutes".format(workertimes[0])) - log.debug(" first task {}".format(worktasks[workdist[0][0]])) - log.debug(" last task {}".format(worktasks[workdist[0][-1]])) - if nworker > 1: - log.debug(" ...") - log.debug(" {}: {:.2f} minutes".format(nworker-1, workertimes[-1])) - log.debug(" first task {}".format( - worktasks[workdist[nworker-1][0]])) - log.debug(" last task {}".format( - worktasks[workdist[nworker-1][-1]] - ) - ) - log.debug("range of worker times = {:.2f} ... {:.2f}".format(workermin, workermax)) - - return (worktasks, workweights, workdist) - - -def nersc_job_size(tasktype, tasklist, machine, queue, maxtime, maxnodes, - nodeprocs=None, db=None, balance=False): - """Compute the NERSC job parameters based on constraints. - - Given the list of tasks, query their estimated runtimes and determine - the "best" job size to use. If the job is too big to fit the constraints - then split up the tasks into multiple jobs. - - If maxtime or maxnodes is zero, then the defaults for the queue are used. - - The default behavior is to create jobs that are as large as possible- i.e. - to run all tasks simultaneously in parallel. In general, larger jobs with - a shorter run time will move through the queue faster. If the job size - exceeds the maximum number of nodes, then the job size is fixed to this - maximum and the runtime is extended. If the runtime exceeds maxtime, then - the job is split. - - Args: - tasktype (str): the type of these tasks. - tasklist (list): the list of tasks. - machine (str): the nersc machine name, - e.g. cori-haswell, cori-knl - queue (str): the nersc queue name, e.g. regular or debug - maxtime (int): the maximum run time in minutes. - maxnodes (int): the maximum number of nodes. - nodeprocs (int): the number of processes per node. 
If None, estimate - this based on the per-process memory needs of the task and the - machine properties. - db (DataBase): the database to pass to the task runtime - calculation. - balance (bool): if True, change the number of workers to load - balance the job. - - Returns: - list: List of tuples (nodes, nodeprocs, runtime, nworker, workersize, - tasks) containing one entry per job. Each entry specifies the - number of nodes to use, the expected total runtime, number of - workers, and the list of tasks for that job. - - """ - from .tasks.base import task_classes, task_type - log = get_logger() - - log.debug("inputs:") - log.debug(" tasktype = {}".format(tasktype)) - log.debug(" len(tasklist) = {}".format(len(tasklist))) - log.debug(" machine = {}".format(machine)) - log.debug(" queue = {}".format(queue)) - log.debug(" maxtime = {}".format(maxtime)) - log.debug(" nodeprocs = {}".format(nodeprocs)) - - if len(tasklist) == 0: - raise RuntimeError("List of tasks is empty") - - # Get the machine properties - hostprops = nersc_machine(machine, queue) - log.debug("hostprops={}".format(hostprops)) - - if maxtime <= 0: - maxtime = hostprops["maxtime"] - log.debug("Using default {} {} maxtime={}".format( - machine, queue, maxtime)) - if maxtime > hostprops["maxtime"]: - raise RuntimeError("requested max time '{}' is too long for {} " - "queue '{}'".format(maxtime, machine, queue)) - - if maxnodes <= 0: - maxnodes = hostprops["maxnodes"] - log.debug("Using default {} {} maxnodes={}".format( - machine, queue, maxnodes)) - else: - log.debug("Using user-specified {} {} maxnodes={}".format( - machine, queue, maxnodes)) - if maxnodes > hostprops["maxnodes"]: - raise RuntimeError("requested max nodes '{}' is larger than {} " - "queue '{}' with {} nodes".format( - maxnodes, machine, queue, hostprops["maxnodes"])) - - coremem = hostprops["nodemem"] / hostprops["nodecores"] - - # Required memory for each task - taskfullmems = [ (x, task_classes[tasktype].run_max_mem_task(x, db=db)) - for x in tasklist ] - - # The max memory required by any task - maxtaskmem = np.max([x[1] for x in taskfullmems]) - log.debug("Maximum memory per task = {}".format(maxtaskmem)) - - # Required memory for a single process of the task. - taskprocmems = [ (x, task_classes[tasktype].run_max_mem_proc(x, db=db)) - for x in tasklist ] - maxprocmem = np.max([x[1] for x in taskprocmems]) - log.debug("Maximum memory per process = {}".format(maxprocmem)) - - maxnodeprocs = hostprops["nodecores"] - if maxprocmem > 0.0: - procmem = coremem - while procmem < maxprocmem: - maxnodeprocs = maxnodeprocs // 2 - procmem *= 2 - log.debug("Maximum processes per node based on memory requirements = {}" - .format(maxnodeprocs)) - else: - log.debug("Using default max procs per node ({})".format(maxnodeprocs)) - - if nodeprocs is None: - nodeprocs = maxnodeprocs - else: - if nodeprocs > maxnodeprocs: - log.warning( - "Cannot use {} procs per node (insufficient memory). Using {} instead.".format(nodeprocs, maxnodeprocs) - ) - nodeprocs = maxnodeprocs - - if nodeprocs > hostprops["nodecores"]: - raise RuntimeError("requested procs per node '{}' is more than the " - "the number of cores per node on {}".format(nodeprocs, machine)) - - log.debug("Using {} processes per node".format(nodeprocs)) - - # How many nodes are required to achieve the maximum memory of the largest - # task? - mintasknodes = 1 - if maxtaskmem > 0.0: - mintasknodes += int(maxtaskmem / hostprops["nodemem"]) - - # Max number of procs to use per task. 
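# The memory logic above halves the process count per node until each process
# has enough memory.  Worked example with the cori-haswell numbers from
# nersc_machine(): coremem = 125.0 / 32 ~= 3.9 GB per core.  For a task
# needing 10 GB per process: 32 -> 16 (7.8 GB) -> 8 (15.6 GB), so 8 processes
# per node.  A self-contained sketch of that loop:
def max_procs_per_node(nodecores, nodemem, maxprocmem):
    procs = nodecores
    procmem = nodemem / nodecores
    while procmem < maxprocmem:
        procs //= 2
        procmem *= 2
    return procs

assert max_procs_per_node(32, 125.0, 10.0) == 8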
-    taskproc = task_classes[tasktype].run_max_procs()
-    if taskproc == 0:
-        # This means that the task is flexible and can use an arbitrary
-        # number of processes. We assign it the number of processes
-        # corresponding to the number of nodes and procs per node dictated
-        # by the memory requirements.
-        taskproc = mintasknodes * nodeprocs
-
-    log.debug("Using {} processes per task".format(taskproc))
-
-    # Number of workers (as large as possible)
-    availproc = maxnodes * nodeprocs
-    maxworkers = availproc // taskproc
-    nworker = maxworkers
-    if nworker > len(tasklist):
-        nworker = len(tasklist)
-    log.debug("Initial number of workers = {}".format(nworker))
-
-    # Number of nodes
-    nodes = compute_nodes(nworker, taskproc, nodeprocs)
-    log.debug("Required nodes = {}".format(nodes))
-
-    # Estimate the startup cost of each worker as a constant based on the
-    # job size.
-    startup_scale = nodes // 200
-    startup_time = (1.0 + startup_scale) * hostprops["startup"]
-    log.debug("Using {} minutes for worker startup time".format(startup_time))
-
-    # Compute the distribution of tasks to these workers
-    (worktasks, worktimes, workdist) = compute_worker_tasks(
-        tasktype, tasklist, hostprops["timefactor"], nworker, taskproc,
-        startup=startup_time, db=db)
-    log.debug("Task times range from {} to {} minutes"
-              .format(worktimes[0], worktimes[-1]))
-
-    # Compute the times for each worker
-    workertimes, workermin, workermax = worker_times(
-        worktimes, workdist, startup=startup_time)
-    log.debug("Initial worker times range from {} to {} minutes"
-              .format(workermin, workermax))
-
-    # Examine the maximum time needed for all workers. If this is too large
-    # for the requested maximum run time, then we need to split the job.
-    # If we have a single job, then we optionally load balance by reducing
-    # the job size and extending the run time.
-
-    final = list()
-
-    if workermax > maxtime:
-        # We must split the job. The tasks are already sorted from large to
-        # small. To decide where to split, we accumulate tasks until we
-        # get to the walltime threshold.
-        log.debug(
-            "Max worker time ({}) is larger than maximum allowed time ({})"
-            .format(workermax, maxtime)
-        )
-        log.debug("Splitting job")
-        maxminutes = maxtime * nworker
-        jobminutes = startup_time * nworker
-        jobtasks = list()
-        jindx = 0
-        for tsk, tsktime in zip(worktasks, worktimes):
-            if jobminutes + tsktime > maxminutes:
-                # Close out this job. We pass the list of tasks through
-                # this calculation function to ensure that everything matches
-                # the same calculation that will be done at runtime.
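# A simplified sketch of this job-splitting idea: walk the sorted task times
# and close out a job whenever the accumulated minutes would exceed the
# budget.  Illustrative only; the real code above recomputes the full worker
# distribution for each closed-out job.
def split_by_budget(tasktimes, budget):
    jobs, current, used = [], [], 0.0
    for t in tasktimes:
        if current and used + t > budget:
            jobs.append(current)
            current, used = [], 0.0
        current.append(t)
        used += t
    if current:
        jobs.append(current)
    return jobs

assert split_by_budget([30, 25, 20, 10, 5], budget=45) == [[30], [25, 20], [10, 5]]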
- (jobworktasks, jobworktimes, jobworkdist) = \ - compute_worker_tasks( - tasktype, jobtasks, hostprops["timefactor"], - nworker, taskproc, startup=startup_time, db=db) - jobworkertimes, jobworkermin, jobworkermax = worker_times( - jobworktimes, jobworkdist, startup=startup_time) - log.debug( - "Split job {} has {} tasks and max time {}" - .format(jindx, len(jobworktasks), jobworkermax) - ) - final.append( - (nodes, nodeprocs, jobworkermax, nworker, taskproc, - jobworktasks) - ) - jindx += 1 - else: - # Accumulate task to this job - jobtasks.append(tsk) - - # Close out any remaining job - if len(jobtasks) > 0: - (jobworktasks, jobworktimes, jobworkdist) = \ - compute_worker_tasks( - tasktype, jobtasks, hostprops["timefactor"], - nworker, taskproc, startup=startup_time, db=db) - jobworkertimes, jobworkermin, jobworkermax = worker_times( - jobworktimes, jobworkdist, startup=startup_time) - log.debug( - "Split job {} has {} tasks and max time {}" - .format(jindx, len(jobworktasks), jobworkermax) - ) - final.append( - (nodes, nodeprocs, jobworkermax, nworker, taskproc, - jobworktasks) - ) - elif balance: - log.debug("Checking for load imbalance as requested") - # We are load balancing a single job - while workermax > 1.5 * workermin: - # pretty bad imbalance... - if (nworker > 2) and (workermax < 0.5 * maxtime): - # We don't want to go lower than 2 workers, since that - # allows one worker to do the "big" task and the other - # worker to do everything else. We also can double the - # runtime if it will exceed our maximum. - nworker = nworker // 2 - log.debug( - "Job is imbalanced, reducing workers to {}" - .format(nworker) - ) - # Recompute job sizes - nodes = compute_nodes(nworker, taskproc, nodeprocs) - log.debug("Number of nodes now = {}".format(nodes)) - (worktasks, worktimes, workdist) = compute_worker_tasks( - tasktype, tasklist, hostprops["timefactor"], nworker, - taskproc, startup=startup_time, db=db) - workertimes, workermin, workermax = worker_times( - worktimes, workdist, startup=startup_time) - log.debug("Worker times range from {} to {} minutes" - .format(workermin, workermax)) - else: - log.debug( - "Job is imbalanced, but there are too few workers or the runtime is already too long." - ) - break - log.debug( - "Adding job with {} tasks, {} workers, and max time {} on {} nodes" - .format(len(worktasks), nworker, workermax, nodes) - ) - final.append((nodes, nodeprocs, workermax, nworker, taskproc, - worktasks)) - else: - # We just have one job - log.debug( - "Adding job with {} tasks, {} workers, and max time {} on {} nodes" - .format(len(worktasks), nworker, workermax, nodes) - ) - final.append((nodes, nodeprocs, workermax, nworker, taskproc, - worktasks)) - - return final diff --git a/deprecated/py/desispec/pipeline/prod.py b/deprecated/py/desispec/pipeline/prod.py deleted file mode 100644 index 630f27d48..000000000 --- a/deprecated/py/desispec/pipeline/prod.py +++ /dev/null @@ -1,320 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.prod -====================== - -Functions for updating and loading a production. -""" - -from __future__ import absolute_import, division, print_function - -import os -import re -import sys - -import numpy as np - -from yaml import load as yload -from yaml import dump as ydump -try: - from yaml import CLoader as YLoader -except ImportError: - from yaml import Loader as YLoader - -import healpy as hp - -from desiutil.log import get_logger - -from .. 
import io - -from .defs import prod_options_name - -from .db import load_db - - -def yaml_write(path, input): - """Write a dictionary to a file. - - Args: - path (str): the output file name. - input (dict): the data. - - Returns: - nothing. - """ - with open(path, "w") as f: - ydump(input, f, default_flow_style=False) - return - - -def yaml_read(path): - """Read a dictionary from a file. - - Args: - path (str): the input file name. - - Returns: - dict: the data. - """ - data = None - with open(path, "r") as f: - data = yload(f, Loader=YLoader) - return data - - -def task_write(path, tasklist): - """Write a task list to a text file or STDOUT. - - If the path is None, write lines to STDOUT. In all cases, write a special - termination line so that this stream or file can be passed into the - task_read function. - - Args: - path (str): the output file name. - tasklist (list): the data. - - Returns: - nothing. - """ - if path is None: - for tsk in tasklist: - sys.stdout.write("{}\n".format(tsk)) - sys.stdout.write("#END\n") - else: - with open(path, "w") as f: - for tsk in tasklist: - f.write("{}\n".format(tsk)) - f.write("#END\n") - return - - -def task_read(path): - """Read a task list from a text file or STDIN. - - Lines that begin with '#' are ignored as comments. If the path is None, - lines are read from STDIN until an EOF marker is received. - - Args: - path (str): the input file name. - - Returns: - list: the list of tasks. - - """ - data = list() - compat = re.compile(r"^#.*") - if path is None: - endpat = re.compile(r"^#END.*") - for line in sys.stdin: - if endpat.match(line) is not None: - break - if compat.match(line) is None: - data.append(line.rstrip()) - else: - with open(path, "r") as f: - for line in f: - if compat.match(line) is None: - data.append(line.rstrip()) - return data - - -def select_nights(allnights, nightstr): - """Select nights based on regex matches. - - Given a list of nights, select all nights matching the specified - patterns and return this subset. - - Args: - allnights (list): list of all nights as strings - nightstr (str): comma-separated list of regex patterns. - - Returns: - list: list of nights that match the patterns. - """ - - nights = [] - if nightstr is not None: - nightsel = nightstr.split(",") - for sel in nightsel: - pat = re.compile(sel) - for nt in allnights: - mat = pat.match(nt) - if mat is not None: - if nt not in nights: - nights.append(nt) - nights = sorted(nights) - else: - nights = sorted(allnights) - - return nights - - -def update_prod(nightstr=None, hpxnside=64, expid=None): - """Create or update a production directory tree. - - For a given production, create the directory hierarchy and the starting - default options.yaml file if it does not exist. Also initialize the - production DB if it does not exist. Then update the DB with one or more - nights from the raw data. Nights to update may be specified by a list of - simple regex matches. - - Args: - nightstr (str): comma-separated list of regex patterns. - hpxnside (int): The nside value to use for spectral grouping. - expid (int): Only update a single exposure. If this is specified, - then nightstr must contain only a single night. 
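# Usage example for select_nights() above: each comma-separated entry is a
# regex matched against the night strings, and the sorted union of matches is
# returned.  This is a compact re-implementation for illustration only.
import re

def select_nights(allnights, nightstr):
    if nightstr is None:
        return sorted(allnights)
    nights = set()
    for sel in nightstr.split(","):
        pat = re.compile(sel)
        nights.update(nt for nt in allnights if pat.match(nt))
    return sorted(nights)

assert select_nights(["20200101", "20200102", "20200201"], "202001.*") == \
    ["20200101", "20200102"]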
- - """ - from .tasks.base import task_classes, task_type - - rawdir = os.path.abspath(io.rawdata_root()) - proddir = os.path.abspath(io.specprod_root()) - - # create main directories if they don"t exist - - if not os.path.isdir(proddir): - os.makedirs(proddir) - - cal2d = os.path.join(proddir, "calibnight") - if not os.path.isdir(cal2d): - os.makedirs(cal2d) - - expdir = os.path.join(proddir, "exposures") - if not os.path.isdir(expdir): - os.makedirs(expdir) - - predir = os.path.join(proddir, "preproc") - if not os.path.isdir(predir): - os.makedirs(predir) - - specdir = os.path.join(proddir, "spectra-{}".format(hpxnside)) - if not os.path.isdir(specdir): - os.makedirs(specdir) - - rundir = io.get_pipe_rundir() - if not os.path.isdir(rundir): - os.makedirs(rundir) - - scriptdir = os.path.join(rundir, io.get_pipe_scriptdir()) - if not os.path.isdir(scriptdir): - os.makedirs(scriptdir) - - logdir = os.path.join(rundir, io.get_pipe_logdir()) - if not os.path.isdir(logdir): - os.makedirs(logdir) - - nightscrdir = os.path.join(scriptdir, io.get_pipe_nightdir()) - if not os.path.isdir(nightscrdir): - os.makedirs(nightscrdir) - - nightlogdir = os.path.join(logdir, io.get_pipe_nightdir()) - if not os.path.isdir(nightlogdir): - os.makedirs(nightlogdir) - - pixlogdir = os.path.join(logdir, io.get_pipe_pixeldir()) - if not os.path.isdir(pixlogdir): - os.makedirs(pixlogdir) - - optfile = os.path.join(rundir, prod_options_name) - if not os.path.isfile(optfile): - opts = dict() - for tt, tc in task_classes.items(): - tdict = { tt : tc.run_defaults() } - opts.update(tdict) - yaml_write(optfile, opts) - - # Load the database, this will create and initialize it if it does not - # exist. - - dbpath = io.get_pipe_database() - db = load_db(dbpath, "w") - - # Get list of available nights - - allnights = [] - nightpat = re.compile(r"\d{8}") - for root, dirs, files in os.walk(rawdir, topdown=True): - for d in dirs: - nightmat = nightpat.match(d) - if nightmat is not None: - allnights.append(d) - break - - # Select the requested nights - - nights = select_nights(allnights, nightstr) - if (expid is not None) and (len(nights) > 1): - raise RuntimeError("If updating a production for one exposure, only " - "a single night should be specified.") - - # Create per-night directories and update the DB for each night. - - for nt in nights: - nexpdir = os.path.join(expdir, nt) - if not os.path.isdir(nexpdir): - os.makedirs(nexpdir) - npredir = os.path.join(predir, nt) - if not os.path.isdir(npredir): - os.makedirs(npredir) - ndir = os.path.join(cal2d, nt) - if not os.path.isdir(ndir): - os.makedirs(ndir) - nlog = os.path.join(nightlogdir, nt) - if not os.path.isdir(nlog): - os.makedirs(nlog) - nscr = os.path.join(nightscrdir, nt) - if not os.path.isdir(nscr): - os.makedirs(nscr) - - db.update(nt, hpxnside, expid) - - # make per-exposure dirs - exps = None - with db.cursor() as cur: - if expid is None: - cur.execute(\ - "select expid from fibermap where night = {}".format(nt)) - else: - # This query is essential a check that the expid is valid. - cur.execute("select expid from fibermap where night = {} " - "and expid = {}".format(nt, expid)) - exps = [ int(x[0]) for x in cur.fetchall() ] - for ex in exps: - fdir = os.path.join(nexpdir, "{:08d}".format(ex)) - if not os.path.isdir(fdir): - os.makedirs(fdir) - fdir = os.path.join(npredir, "{:08d}".format(ex)) - if not os.path.isdir(fdir): - os.makedirs(fdir) - - return - - -def load_prod(mode="w", user=None): - """Load the database and options for a production. 
- - This loads the database from the production location defined by the usual - DESI environment variables. It also loads the global options file for - the production. - - Args: - mode (str): open mode for sqlite database ("r" or "w"). - user (str): for postgresql, an alternate user name for opening the DB. - This can be used to connect as a user with read-only access. - - Returns: - tuple: (pipeline.db.DataBase, dict) The database for the production - and the global options dictionary. - - """ - dbpath = io.get_pipe_database() - db = load_db(dbpath, mode=mode, user=user) - - rundir = io.get_pipe_rundir() - optfile = os.path.join(rundir, prod_options_name) - opts = yaml_read(optfile) - - return (db, opts) diff --git a/deprecated/py/desispec/pipeline/run.py b/deprecated/py/desispec/pipeline/run.py deleted file mode 100644 index 1fe1f582f..000000000 --- a/deprecated/py/desispec/pipeline/run.py +++ /dev/null @@ -1,610 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.run -===================== - -Tools for running the pipeline. -""" - -from __future__ import absolute_import, division, print_function - -import os -import sys -import time -import random -import signal - -import numpy as np - -from desiutil.log import get_logger - -from .. import io - -from ..parallel import (dist_uniform, dist_discrete, dist_discrete_all, - stdouterr_redirected) - -from .prod import load_prod - -from .db import check_tasks - -from .scriptgen import parse_job_env - -from .plan import compute_worker_tasks, worker_times - - -#- TimeoutError and timeout handler to prevent runaway tasks -class TimeoutError(Exception): - pass - -def _timeout_handler(signum, frame): - raise TimeoutError('Timeout at {}'.format(time.asctime())) - -def run_task(name, opts, comm=None, logfile=None, db=None): - """Run a single task. - - Based on the name of the task, call the appropriate run function for that - task. Log output to the specified file. Run using the specified MPI - communicator and optionally update state to the specified database. - - Note: This function DOES NOT check the database or filesystem to see if - the task has been completed or if its dependencies exist. It assumes that - some higher-level code has done that if necessary. - - Args: - name (str): the name of this task. - opts (dict): options to use for this task. - comm (mpi4py.MPI.Comm): optional MPI communicator. - logfile (str): output log file. If None, do not redirect output to a - file. - db (pipeline.db.DB): The optional database to update. - - Returns: - int: the total number of processes that failed. - - """ - from .tasks.base import task_classes, task_type - log = get_logger() - - ttype = task_type(name) - - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - if rank == 0: - if (logfile is not None) and os.path.isfile(logfile): - os.remove(logfile) - # Mark task as in progress - if db is not None: - task_classes[ttype].state_set(db=db, name=name, state="running") - - failcount = 0 - - #- Set timeout alarm to avoid runaway tasks - old_sighandler = signal.signal(signal.SIGALRM, _timeout_handler) - expected_run_time = task_classes[ttype].run_time(name, procs=nproc, db=db) - - # Are we running on a slower/faster node than default timing? 
-    timefactor = float(os.getenv("DESI_PIPE_RUN_TIMEFACTOR", default=1.0))
-    expected_run_time *= timefactor
-
-    signal.alarm(int(expected_run_time * 60))
-    if rank == 0:
-        log.info("Running {} with timeout {:.1f} min".format(
-            name, expected_run_time))
-
-    task_start_time = time.time()
-    try:
-        if logfile is None:
-            # No redirection
-            if db is None:
-                failcount = task_classes[ttype].run(name, opts, comm=comm)
-            else:
-                failcount = task_classes[ttype].run_and_update(db, name, opts,
-                                                               comm=comm)
-        else:
-            #- time jitter so that we don't open all log files simultaneously
-            time.sleep(2 * random.random())
-            with stdouterr_redirected(to=logfile, comm=comm):
-                if db is None:
-                    failcount = task_classes[ttype].run(name, opts, comm=comm)
-                else:
-                    failcount = task_classes[ttype].run_and_update(db, name,
-                                                                   opts, comm=comm)
-    except TimeoutError:
-        dt = time.time() - task_start_time
-        if rank == 0:
-            log.error("Task {} timed out after {:.1f} sec".format(name, dt))
-            if db is not None:
-                task_classes[ttype].state_set(db, name, "failed")
-
-        failcount = nproc
-    finally:
-        #- Reset timeout alarm whether we finished cleanly or not
-        signal.alarm(0)
-
-    #- Restore previous signal handler
-    signal.signal(signal.SIGALRM, old_sighandler)
-    if rank == 0:
-        log.debug("Finished with task {} sigalarm reset".format(name))
-        log.debug("Task {} returning failcount {}".format(name, failcount))
-
-    return failcount
-
-
-def run_task_simple(name, opts, comm=None):
-    """Run a single task with no DB or log redirection.
-
-    This is a wrapper around run_task() for use without a database and with no
-    log redirection. See documentation for that function.
-
-    Args:
-        name (str): the name of this task.
-        opts (dict): options to use for this task.
-        comm (mpi4py.MPI.Comm): optional MPI communicator.
-
-    Returns:
-        int: the total number of processes that failed.
-
-    """
-    return run_task(name, opts, comm=comm, logfile=None, db=None)
-
-
-def run_dist(tasktype, tasklist, db, nproc, procs_per_node, force=False):
-    """Compute the runtime distribution of tasks.
-
-    For a given number of processes, parse job environment variables and
-    compute the number of workers to use and the remaining tasks to process.
-    Divide the processes into groups, and associate some (or all) of those
-    groups to workers. Assign tasks to these groups of processes. Some groups
-    may have zero tasks if there are more groups than workers needed.
-
-    Returns:
-        tuple: The (groupsize, groups, tasks, dist) information. Groupsize
-            is the processes per group. Groups is a list of
-            tuples (one per process) giving the group number and rank within
-            the group. The tasks are a sorted list of tasks containing the
-            subset of the inputs that needs to be run. The dist is a list of
-            tuples (one per group) containing the indices of tasks
-            assigned to each group.
-    """
-    from .tasks.base import task_classes, task_type
-    log = get_logger()
-
-    runtasks = None
-    ntask = None
-    ndone = None
-    log.info("Distributing {} {} tasks".format(len(tasklist), tasktype))
-    if force:
-        # Run everything
-        runtasks = tasklist
-        ntask = len(runtasks)
-        ndone = 0
-        log.info("Forcibly running {} tasks regardless of state".format(ntask))
-    else:
-        # Actually check which things need to be run.
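# The SIGALRM timeout pattern in run_task() above generalizes to a small
# context manager.  A sketch (Unix-only and main-thread-only, the same
# restrictions the original code has; run_one_task is a hypothetical callee):
import signal
from contextlib import contextmanager

@contextmanager
def time_limit(seconds):
    def _handler(signum, frame):
        raise TimeoutError("timed out after {} seconds".format(seconds))
    old = signal.signal(signal.SIGALRM, _handler)
    signal.alarm(seconds)
    try:
        yield
    finally:
        signal.alarm(0)                     # cancel any pending alarm
        signal.signal(signal.SIGALRM, old)  # restore previous handler

# with time_limit(60):
#     run_one_task()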
- states = check_tasks(tasklist, db=db) - runtasks = [ x for x in tasklist if states[x] == "ready" ] - ntask = len(runtasks) - ndone = len([ x for x in tasklist if states[x] == "done" ]) - log.info( - "Found {} tasks ready to run and {} tasks done" - .format(ntask, ndone) - ) - - # Query the environment for DESI runtime variables set in - # pipeline-generated slurm scripts and use default values if - # they are not found. Then compute the number of workers and the - # distribution of tasks in a way that is identical to what was - # done during job planning. - - job_env = parse_job_env() - tfactor = 1.0 - if "timefactor" in job_env: - tfactor = job_env["timefactor"] - log.info("Using timefactor {}".format(tfactor)) - else: - log.warning( - "DESI_PIPE_RUN_TIMEFACTOR not found in environment, using 1.0." - ) - startup = 0.0 - if "startup" in job_env: - startup = job_env["startup"] - log.info("Using worker startup of {} minutes".format(startup)) - else: - log.warning( - "DESI_PIPE_RUN_STARTUP not found in environment, using 0.0." - ) - worker_size = 0 - if "workersize" in job_env: - worker_size = job_env["workersize"] - log.info("Found worker size of {} from environment".format(worker_size)) - else: - # We have no information from the planning, so fall back to using the - # default for this task type or else one node as the worker size. - worker_size = task_classes[tasktype].run_max_procs() - if worker_size == 0: - worker_size = procs_per_node - log.warning( - "DESI_PIPE_RUN_WORKER_SIZE not found in environment, using {}." - .format(worker_size) - ) - nworker = 0 - if "workers" in job_env: - nworker = job_env["workers"] - log.info("Found {} workers from environment".format(nworker)) - else: - # We have no information from the planning - nworker = nproc // worker_size - if nworker == 0: - nworker = 1 - log.warning( - "DESI_PIPE_RUN_WORKERS not found in environment, using {}." - .format(nworker) - ) - if nworker > nproc: - msg = "Number of workers ({}) larger than number of procs ({}). This should never happen and means that the job script may have been changed by hand.".format(nworker, nproc) - raise RuntimeError(msg) - - # A "group" of processes is identical in size to the worker_size above. - # However, there may be more process groups than workers. This can happen - # if we reduced the number of workers due to some tasks being completed, - # or if there is a "partial" process group remaining when the worker size - # does not evenly divide into the total number of processes. We compute - # the process group information here so that the calling code can use it - # directly if splitting the communicator. - - ngroup = nproc // worker_size - if ngroup * worker_size < nproc: - # We have a leftover partial process group - ngroup += 1 - - groups = [(x // worker_size, x % worker_size) for x in range(nproc)] - - # Compute the task distribution - - if ntask == 0: - # All tasks are done! - return worker_size, groups, list(), [(-1, 0) for x in range(ngroup)] - - if nworker > len(runtasks): - # The number of workers set at job planning time is larger - # than the number of tasks that remain to be done. Reduce - # the number of workers. - log.info( - "Job has {} workers but only {} tasks to run. Reducing number of workers to match." 
- .format(nworker, len(runtasks)) - ) - nworker = len(runtasks) - - (worktasks, worktimes, workdist) = compute_worker_tasks( - tasktype, runtasks, tfactor, nworker, worker_size, - startup=startup, db=db) - - # Compute the times for each worker- just for information - workertimes, workermin, workermax = worker_times( - worktimes, workdist, startup=startup) - log.info( - "{} workers have times ranging from {} to {} minutes" - .format(nworker, workermin, workermax) - ) - - dist = list() - - for g in range(ngroup): - if g < nworker: - # This process group is a being used as a worker. Assign it the - # tasks. - dist.append(workdist[g]) - else: - # This process group is idle (not acting as a worker) or contains - # the leftover processes to make a whole number of nodes. - dist.append([]) - - return worker_size, groups, worktasks, dist - - -def run_task_list(tasktype, tasklist, opts, comm=None, db=None, force=False): - """Run a collection of tasks of the same type. - - This function requires that the DESI environment variables are set to - point to the current production directory. - - This function first takes the communicator and uses the maximum processes - per task to split the communicator and form groups of processes of - the desired size. It then takes the list of tasks and uses their relative - run time estimates to assign tasks to the process groups. Each process - group loops over its assigned tasks. - - If the database is not specified, no state tracking will be done and the - filesystem will be checked as needed to determine the current state. - - Only tasks that are ready to run (based on the filesystem checks or the - database) will actually be attempted. - - Args: - tasktype (str): the pipeline step to process. - tasklist (list): the list of tasks. All tasks should be of type - "tasktype" above. - opts (dict): the global options (for example, as read from the - production options.yaml file). - comm (mpi4py.Comm): the full communicator to use for whole set of tasks. - db (pipeline.db.DB): The optional database to update. - force (bool): If True, ignore database and filesystem state and just - run the tasks regardless. - - Returns: - tuple: the number of ready tasks, number that are done, and the number - that failed. - - """ - from .tasks.base import task_classes, task_type - log = get_logger() - - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - # Compute the number of processes that share a node. - - procs_per_node = 1 - if comm is not None: - import mpi4py.MPI as MPI - nodecomm = comm.Split_type(MPI.COMM_TYPE_SHARED, 0) - procs_per_node = nodecomm.size - - # Total number of input tasks - ntask = len(tasklist) - - # Get the options for this task type. - - options = opts[tasktype] - - # Get the tasks that still need to be done. - - groupsize = None - groups = None - worktasks = None - dist = None - if rank == 0: - groupsize, groups, worktasks, dist = run_dist( - tasktype, tasklist, db, nproc, procs_per_node, force=force - ) - - comm_group = None - comm_rank = comm - if comm is not None: - groupsize = comm.bcast(groupsize, root=0) - groups = comm.bcast(groups, root=0) - worktasks = comm.bcast(worktasks, root=0) - dist = comm.bcast(dist, root=0) - # Determine if we need to split the communicator. Are any processes - # in a group larger than one? 
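# The communicator split that follows gives every process two views: one
# communicator spanning its own group (its worker), and one spanning all
# processes that hold the same rank across groups.  Minimal mpi4py sketch
# using the same (group, group_rank) construction as run_dist() above:
from mpi4py import MPI

comm = MPI.COMM_WORLD
worker_size = 4
group = comm.rank // worker_size        # which worker this process joins
group_rank = comm.rank % worker_size    # rank within that worker
comm_group = comm.Split(color=group, key=group_rank)
comm_rank = comm.Split(color=group_rank, key=group)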
- largest_rank = np.max([x[1] for x in groups]) - if largest_rank > 0: - comm_group = comm.Split(color=groups[rank][0], key=groups[rank][1]) - comm_rank = comm.Split(color=groups[rank][1], key=groups[rank][0]) - - # How many original tasks did we have and how many were done? - ntask = len(tasklist) - ndone = ntask - len(worktasks) - - # every group goes and does its tasks... - - rundir = io.get_pipe_rundir() - logdir = os.path.join(rundir, io.get_pipe_logdir()) - - group = groups[rank][0] - group_rank = groups[rank][1] - ## group_firsttask = dist[group][0] - ## group_ntask = dist[group][1] - group_ntask = len(dist[group]) - - failcount = 0 - group_failcount = 0 - - if group_ntask > 0: - if group_rank == 0: - log.debug( - "Group {}, running {} tasks".format(group, len(dist[group])) - ) - - for t in dist[group]: - # For this task, determine the output log file. If the task has - # the "night" key in its name, then use that subdirectory. - # Otherwise, if it has the "pixel" key, use the appropriate - # subdirectory. - tt = task_type(worktasks[t]) - fields = task_classes[tt].name_split(worktasks[t]) - - tasklog = None - if "night" in fields: - tasklogdir = os.path.join(logdir, io.get_pipe_nightdir(), - "{:08d}".format(fields["night"])) - # (this directory should have been made during the prod update) - tasklog = os.path.join(tasklogdir, - "{}.log".format(worktasks[t])) - elif "pixel" in fields: - tasklogdir = os.path.join(logdir, "healpix", - io.healpix_subdirectory(fields["nside"],fields["pixel"])) - # When creating this directory, there MIGHT be conflicts from - # multiple processes working on pixels in the same - # sub-directories... - try : - if not os.path.isdir(os.path.dirname(tasklogdir)): - os.makedirs(os.path.dirname(tasklogdir)) - except FileExistsError: - pass - try : - if not os.path.isdir(tasklogdir): - os.makedirs(tasklogdir) - except FileExistsError: - pass - tasklog = os.path.join(tasklogdir, - "{}.log".format(worktasks[t])) - - failedprocs = run_task(worktasks[t], options, comm=comm_group, - logfile=tasklog, db=db) - - if failedprocs > 0: - group_failcount += 1 - log.debug("{} failed; group_failcount now {}".format( - worktasks[t], group_failcount)) - - failcount = group_failcount - - # Every process in each group has the fail count for the tasks assigned to - # its group. To get the total onto all processes, we just have to do an - # allreduce across the rank communicator. - - if comm_rank is not None: - failcount = comm_rank.allreduce(failcount) - - if rank == 0: - log.debug("Tasks done; {} failed".format(failcount)) - - if db is not None and rank == 0 : - # postprocess the successful tasks - - log.debug("postprocess the successful tasks") - - states = db.get_states(worktasks) - - log.debug("states={}".format(states)) - log.debug("runtasks={}".format(worktasks)) - - with db.cursor() as cur : - for name in worktasks : - if states[name] == "done" : - log.debug("postprocessing {}".format(name)) - task_classes[tasktype].postprocessing(db,name,cur) - - return ntask, ndone, failcount - - -def run_task_list_db(tasktype, tasklist, comm=None): - """Run a list of tasks using the pipeline DB and options. - - This is a wrapper around run_task_list which uses the production database - and global options file. - - Args: - tasktype (str): the pipeline step to process. - tasklist (list): the list of tasks. All tasks should be of type - "tasktype" above. - comm (mpi4py.Comm): the full communicator to use for whole set of tasks. 
-
-    Returns:
-        tuple: the number of ready tasks, the number done, and the number
-            that failed.
-
-    """
-    (db, opts) = load_prod("w")
-    return run_task_list(tasktype, tasklist, opts, comm=comm, db=db)
-
-
-def dry_run(tasktype, tasklist, opts, procs, procs_per_node, db=None,
-    launch="mpirun -np", force=False):
-    """Compute the distribution of tasks and equivalent commands.
-
-    This function takes similar arguments to run_task_list(), except that it
-    simulates the data distribution and commands that would be run if given
-    the specified number of processes and processes per node.
-
-    This can be used to debug issues with the runtime concurrency or the
-    actual options that will be passed to the underlying main() entry points
-    for each task.
-
-    This function requires that the DESI environment variables are set to
-    point to the current production directory.
-
-    Only tasks that are ready to run (based on the filesystem checks or the
-    database) will actually be attempted.
-
-    NOTE: Since this function is just informative and for interactive use,
-    we print information directly to STDOUT rather than logging.
-
-    Args:
-        tasktype (str): the pipeline step to process.
-        tasklist (list): the list of tasks. All tasks should be of type
-            "tasktype" above.
-        opts (dict): the global options (for example, as read from the
-            production options.yaml file).
-        procs (int): the number of processes to simulate.
-        procs_per_node (int): the number of processes per node to simulate.
-        db (pipeline.db.DB): The optional database to update.
-        launch (str): The launching command for a job. This is just a
-            convenience and prepended to each command before the number of
-            processes.
-        force (bool): If True, ignore database and filesystem state and just
-            run the tasks regardless.
-
-    Returns:
-        Nothing.
-
-    """
-    from .tasks.base import task_classes, task_type
-    log = get_logger()
-
-    prefix = "DRYRUN: "
-
-    # Get the options for this task type.
-
-    options = dict()
-    if tasktype in opts:
-        options = opts[tasktype]
-
-    # Get the tasks that still need to be done.
-
-    groupsize, groups, worktasks, dist = run_dist(
-        tasktype, tasklist, db, procs, procs_per_node, force=force
-    )
-
-    # Go through the tasks
-
-    rundir = io.get_pipe_rundir()
-    logdir = os.path.join(rundir, io.get_pipe_logdir())
-
-    for group, group_rank in groups:
-        ## group_firsttask = dist[group][0]
-        ## group_ntask = dist[group][1]
-        group_ntask = len(dist[group])
-        if group_ntask == 0:
-            continue
-
-        for t in dist[group]:
-            # For this task, determine the output log file. If the task has
-            # the "night" key in its name, then use that subdirectory.
-            # Otherwise, if it has the "pixel" key, use the appropriate
-            # subdirectory.
- tt = task_type(worktasks[t]) - fields = task_classes[tt].name_split(worktasks[t]) - - tasklog = None - if "night" in fields: - tasklogdir = os.path.join(logdir, io.get_pipe_nightdir(), - "{:08d}".format(fields["night"])) - # (this directory should have been made during the prod update) - tasklog = os.path.join(tasklogdir, - "{}.log".format(worktasks[t])) - elif "pixel" in fields: - tasklogdir = os.path.join(logdir, "healpix", - io.healpix_subdirectory(fields["nside"],fields["pixel"])) - tasklog = os.path.join(tasklogdir, - "{}.log".format(worktasks[t])) - - com = task_classes[tt].run_cli(worktasks[t], options, groupsize, - launch=launch, log=tasklog, db=db) - print("{} {}".format(prefix, com)) - sys.stdout.flush() - - print("{}".format(prefix)) - sys.stdout.flush() - - return diff --git a/deprecated/py/desispec/pipeline/scriptgen.py b/deprecated/py/desispec/pipeline/scriptgen.py deleted file mode 100644 index 4b10249cc..000000000 --- a/deprecated/py/desispec/pipeline/scriptgen.py +++ /dev/null @@ -1,426 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.scriptgen -=========================== - -Tools for generating shell and slurm scripts. -""" - -from __future__ import absolute_import, division, print_function - -import os -import stat -import sys -import re - -import numpy as np - -from desiutil.log import get_logger - -from .. import io - -from ..parallel import (dist_uniform, dist_discrete, dist_discrete_all, - stdouterr_redirected) - -from .prod import task_read, task_write - -from .plan import nersc_machine, nersc_job_size - - -def dump_job_env(fh, tfactor, startup, nworker, workersize): - """Write parameters needed at runtime to an open filehandle. - """ - fh.write("export DESI_PIPE_RUN_TIMEFACTOR={}\n".format(tfactor)) - fh.write("export DESI_PIPE_RUN_STARTUP={}\n".format(startup)) - fh.write("export DESI_PIPE_RUN_WORKERS={}\n\n".format(nworker)) - fh.write("export DESI_PIPE_RUN_WORKER_SIZE={}\n\n".format(workersize)) - return - - -def parse_job_env(): - """Retrieve job parameters from the environment. 
- """ - par = dict() - if "DESI_PIPE_RUN_TIMEFACTOR" in os.environ: - par["timefactor"] = float(os.environ["DESI_PIPE_RUN_TIMEFACTOR"]) - if "DESI_PIPE_RUN_STARTUP" in os.environ: - par["startup"] = float(os.environ["DESI_PIPE_RUN_STARTUP"]) - if "DESI_PIPE_RUN_WORKERS" in os.environ: - par["workers"] = int(os.environ["DESI_PIPE_RUN_WORKERS"]) - if "DESI_PIPE_RUN_WORKER_SIZE" in os.environ: - par["workersize"] = int(os.environ["DESI_PIPE_RUN_WORKER_SIZE"]) - return par - - -def shell_job(path, logroot, desisetup, commands, comrun="", mpiprocs=1, - openmp=1,debug=False): - if len(commands) == 0: - raise RuntimeError("List of commands is empty") - with open(path, "w") as f: - f.write("#!/bin/bash\n\n") - f.write("now=`date +%Y%m%d-%H%M%S`\n") - f.write("export STARTTIME=${now}\n") - f.write("log={}_${{now}}.log\n\n".format(logroot)) - f.write("source {}\n\n".format(desisetup)) - - f.write("# Force the script to exit on errors from commands\n") - f.write("set -e\n\n") - - f.write("export OMP_NUM_THREADS={}\n\n".format(openmp)) - if debug: - f.write("export DESI_LOGLEVEL=DEBUG\n\n") - - run = "" - if comrun != "": - run = "{} {}".format(comrun, mpiprocs) - for com in commands: - executable = com.split(" ")[0] - # f.write("which {}\n".format(executable)) - f.write("echo logging to ${log}\n") - f.write("time {} {} >>${{log}} 2>&1\n\n".format(run, com)) - mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH - os.chmod(path, mode) - return - - -def nersc_job(jobname, path, logroot, desisetup, commands, machine, queue, - nodes, cnodes, ppns, minutes, nworker, workersize, multisrun=False, - openmp=False, multiproc=False, shifterimg=None, debug=False): - """Create a SLURM script for use at NERSC. - - Args: - - - """ - if len(commands) == 0: - raise RuntimeError("List of commands is empty") - hostprops = nersc_machine(machine, queue) - - if nodes > hostprops["maxnodes"]: - raise RuntimeError("request nodes '{}' is too large for {} queue '{}'"\ - .format(nodes, machine, queue)) - - if minutes > hostprops["maxtime"]: - raise RuntimeError("request time '{}' is too long for {} queue '{}'"\ - .format(minutes, machine, queue)) - - hours = int(minutes/60) - fullmin = int(minutes - 60*hours) - timestr = "{:02d}:{:02d}:00".format(hours, fullmin) - - totalnodes = nodes - if multisrun: - # we are running every command as a separate srun - # and backgrounding them. In this case, the nodes - # given are per command, so we need to compute the - # total. 
- totalnodes = nodes * len(commands) - - with open(path, "w") as f: - f.write("#!/bin/bash -l\n\n") - if shifterimg is not None: - f.write("#SBATCH --image={}\n".format(shifterimg)) - - for line in hostprops["sbatch"]: - f.write("{}\n".format(line)) - - f.write("#SBATCH --account=desi\n") - f.write("#SBATCH --nodes={}\n".format(totalnodes)) - f.write("#SBATCH --time={}\n".format(timestr)) - f.write("#SBATCH --job-name={}\n".format(jobname)) - f.write("#SBATCH --output={}_%j.log\n\n".format(logroot)) - - f.write("echo Starting slurm script at `date`\n\n") - f.write("source {}\n\n".format(desisetup)) - - f.write("# Force the script to exit on errors from commands\n") - f.write("set -e\n\n") - - f.write("# Set TMPDIR to be on the ramdisk\n") - f.write("export TMPDIR=/dev/shm\n\n") - - f.write("cpu_per_core={}\n".format(hostprops["corecpus"])) - f.write("node_cores={}\n\n".format(hostprops["nodecores"])) - - if debug: - f.write("export DESI_LOGLEVEL=DEBUG\n\n") - - f.write("now=`date +%Y%m%d-%H%M%S`\n") - f.write("echo \"job datestamp = ${now}\"\n") - f.write("log={}_${{now}}.log\n\n".format(logroot)) - f.write("envlog={}_${{now}}.env\n".format(logroot)) - f.write("env > ${envlog}\n\n") - for com, cn, ppn, nwrk, wrksz in zip( - commands, cnodes, ppns, nworker, workersize): - if ppn > hostprops["nodecores"]: - raise RuntimeError("requested procs per node '{}' is more than" - " the number of cores per node on {}".format(ppn, machine)) - f.write("nodes={}\n".format(cn)) - f.write("node_proc={}\n".format(ppn)) - f.write("node_thread=$(( node_cores / node_proc ))\n") - f.write("node_depth=$(( cpu_per_core * node_thread ))\n") - f.write("procs=$(( nodes * node_proc ))\n\n") - dump_job_env(f, hostprops["timefactor"], hostprops["startup"], - nwrk, wrksz) - if openmp: - f.write("export OMP_NUM_THREADS=${node_thread}\n") - f.write("export OMP_PLACES=threads\n") - f.write("export OMP_PROC_BIND=spread\n") - else: - f.write("export OMP_NUM_THREADS=1\n") - f.write("\n") - runstr = "srun" - if multiproc: - runstr = "{} --cpu_bind=no".format(runstr) - f.write("export KMP_AFFINITY=disabled\n") - f.write("\n") - else: - runstr = "{} --cpu_bind=cores".format(runstr) - - if shifterimg is None: - f.write("run=\"{} -n ${{procs}} -N ${{nodes}} -c " - "${{node_depth}}\"\n\n".format(runstr)) - else: - f.write("run=\"{} -n ${{procs}} -N ${{nodes}} -c " - "${{node_depth}} shifter\"\n\n".format(runstr)) - - comlist = com.split(" ") - executable = comlist.pop(0) - f.write("ex=`which {}`\n".format(executable)) - f.write("app=\"${ex}.app\"\n") - f.write("if [ -x ${app} ]; then\n") - f.write(" if [ ${ex} -nt ${app} ]; then\n") - f.write(" app=${ex}\n") - f.write(" fi\n") - f.write("else\n") - f.write(" app=${ex}\n") - f.write("fi\n") - f.write("echo calling {} at `date`\n\n".format(executable)) - f.write("export STARTTIME=`date +%Y%m%d-%H%M%S`\n") - f.write("echo ${{run}} ${{app}} {}\n".format(" ".join(comlist))) - f.write("time ${{run}} ${{app}} {} >>${{log}} 2>&1".format(" ".join(comlist))) - if multisrun: - f.write(" &") - f.write("\n\n") - if multisrun: - f.write("wait\n\n") - - f.write("echo done with slurm script at `date`\n") - - return - - -def batch_shell(tasks_by_type, outroot, logroot, mpirun="", mpiprocs=1, - openmp=1, db=None): - """Generate bash script(s) to process lists of tasks. - - Given sets of task lists, generate a script that processes each in order. - - Args: - tasks_by_type (OrderedDict): Ordered dictionary of the tasks for each - type to be written to a single job script. 
- outroot (str): root output script name. - logroot (str): root output log name. - mpirun (str): optional command to use for launching MPI programs. - mpiprocs (int): if mpirun is specified, use this number of processes. - openmp (int): value to set for OMP_NUM_THREADS. - db (DataBase): the pipeline database handle. - - Returns: - (list): list of generated script files. - - """ - from .tasks.base import task_classes, task_type - - # Get the location of the setup script from the production root. - proddir = os.path.abspath(io.specprod_root()) - desisetup = os.path.abspath(os.path.join(proddir, "setup.sh")) - - dbstr = "" - if db is None: - dbstr = "--nodb" - - coms = list() - - for t, tasklist in tasks_by_type.items(): - if len(tasklist) == 0: - raise RuntimeError("{} task list is empty".format(t)) - - taskfile = "{}_{}.tasks".format(outroot, t) - task_write(taskfile, tasklist) - - if mpiprocs > 1: - coms.append("desi_pipe_exec_mpi --tasktype {} --taskfile {} {}"\ - .format(t, taskfile, dbstr)) - else: - coms.append("desi_pipe_exec --tasktype {} --taskfile {} {}"\ - .format(t, taskfile, dbstr)) - - outfile = "{}.sh".format(outroot) - - shell_job(outfile, logroot, desisetup, coms, comrun=mpirun, - mpiprocs=mpiprocs, openmp=openmp) - - return [ outfile ] - - -def batch_nersc(tasks_by_type, outroot, logroot, jobname, machine, queue, - maxtime, maxnodes, nodeprocs=None, openmp=False, multiproc=False, db=None, - shifterimg=None, debug=False): - """Generate slurm script(s) to process lists of tasks. - - Given sets of task lists and constraints about the machine, generate slurm - scripts for use at NERSC. - - Args: - tasks_by_type (OrderedDict): Ordered dictionary of the tasks for each - type to be written to a single job script. - outroot (str): root output script name. - logroot (str): root output log name. - jobname (str): the name of the job. - machine (str): the NERSC machine name. - queue (str): the name of the queue - maxtime (int): the maximum run time in minutes. - maxnodes (int): the maximum number of nodes to use. - nodeprocs (int): the number of processes to use per node. - openmp (bool): if True, set OMP_NUM_THREADS to the correct value. - multiproc (bool): if True, use OMP_NUM_THREADS=1 and disable core - binding of processes. - db (DataBase): the pipeline database handle. - shifter (str): the name of the shifter image to use. - debug (bool): if True, set DESI log level to DEBUG in the script. - - Returns: - (list): list of generated slurm files. - - """ - from .tasks.base import task_classes, task_type - - # Get the location of the setup script from the production root. - proddir = os.path.abspath(io.specprod_root()) - desisetup = os.path.abspath(os.path.join(proddir, "setup.sh")) - - joblist = dict() - - # How many pipeline steps are we trying to pack? - npacked = len(tasks_by_type) - - for t, tasklist in tasks_by_type.items(): - if len(tasklist) == 0: - raise RuntimeError("{} task list is empty".format(t)) - # Compute job size for this task type - if npacked > 1: - joblist[t] = nersc_job_size( - t, tasklist, machine, queue, maxtime, maxnodes, - nodeprocs=nodeprocs, db=db - ) - else: - # Safe to load balance - joblist[t] = nersc_job_size( - t, tasklist, machine, queue, maxtime, maxnodes, - nodeprocs=nodeprocs, db=db, balance=True - ) - # If we are packing multiple pipeline steps, but one of those steps - # is already too large to fit within queue constraints, then this - # makes no sense. 
- if (len(joblist[t]) > 1) and (npacked > 1): - log = get_logger() - log.info("{} {} queue, maxtime={}, maxnodes={}".format( - machine, queue, maxtime, maxnodes)) - log.info("{} {} tasks -> {} jobs".format( - len(tasklist), t, len(joblist[t]))) - raise RuntimeError("Cannot batch multiple pipeline steps, " - "each with multiple jobs") - - dbstr = "" - if db is None: - dbstr = "--nodb" - - scriptfiles = list() - - log = get_logger() - - # Add an extra 20 minutes (!) to the overall job runtime as a buffer - # against system issues. - runtimebuffer = 20.0 - - if npacked == 1: - # We have a single pipeline step which might be split into multiple - # job scripts. - jindx = 0 - tasktype = list(tasks_by_type.keys())[0] - for (nodes, ppn, runtime, nworker, workersize, tasks) \ - in joblist[tasktype]: - joblogroot = None - joboutroot = None - if jindx>0: - joblogroot = "{}_{}".format(logroot, jindx) - joboutroot = "{}_{}".format(outroot, jindx) - else: - joblogroot = logroot - joboutroot = outroot - - taskfile = "{}.tasks".format(joboutroot) - task_write(taskfile, tasks) - coms = [ "desi_pipe_exec_mpi --tasktype {} --taskfile {} {}"\ - .format(tasktype, taskfile, dbstr) ] - outfile = "{}.slurm".format(joboutroot) - - log.debug("writing job {}".format(outfile)) - - runtime += runtimebuffer - - nersc_job(jobname, outfile, joblogroot, desisetup, coms, machine, - queue, nodes, [ nodes ], [ ppn ], runtime, [ nworker ], - [ workersize ], openmp=openmp, multiproc=multiproc, - shifterimg=shifterimg, debug=debug) - scriptfiles.append(outfile) - jindx += 1 - - else: - # We are packing multiple pipeline steps into a *single* job script. - # We have already verified that each step fits within the machine - # and queue constraints. We use the largest job size. - fullnodes = 0 - fullruntime = 0 - for t in tasks_by_type.keys(): - for (nodes, ppn, runtime, nworker, workersize, tasks) in joblist[t]: - if nodes > fullnodes: - fullnodes = nodes - fullruntime += runtime - - # Verify that this total does not exceed the machine limits - hostprops = nersc_machine(machine, queue) - if fullruntime > hostprops["maxtime"]: - raise RuntimeError("Packed pipeline jobs exceed time limit") - - coms = list() - ppns = list() - cnodes = list() - nwk = list() - wrksz = list() - for t, tasklist in tasks_by_type.items(): - (nodes, ppn, runtime, nworker, workersize, tasks) = joblist[t][0] - taskfile = "{}_{}.tasks".format(outroot, t) - task_write(taskfile, tasks) - coms.append("desi_pipe_exec_mpi --tasktype {} --taskfile {} {}"\ - .format(t, taskfile, dbstr)) - ppns.append(ppn) - cnodes.append(nodes) - nwk.append(nworker) - wrksz.append(workersize) - - outfile = "{}.slurm".format(outroot) - - fullruntime += runtimebuffer - - nersc_job(jobname, outfile, logroot, desisetup, coms, machine, - queue, fullnodes, cnodes, ppns, fullruntime, nwk, wrksz, - openmp=openmp, multiproc=multiproc, shifterimg=shifterimg, - debug=debug) - scriptfiles.append(outfile) - - return scriptfiles diff --git a/deprecated/py/desispec/pipeline/tasks/__init__.py b/deprecated/py/desispec/pipeline/tasks/__init__.py deleted file mode 100644 index a1f9eed5a..000000000 --- a/deprecated/py/desispec/pipeline/tasks/__init__.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks -======================= - -Classes that describe pipeline tasks. 
-""" - -from __future__ import absolute_import, division, print_function - -# On first import, load all the classes that we have, based on the files in -# this directory. Every file should be named after the type of task (psf, -# frame, etc), and every file should define a class named "TaskBlah" where -# "Blah" can be whatever string you want. - -# We could use class (rather than instance) methods in all these task classes -# (since we generally only have one instance in this dictionary below). -# However, by using instances here we leave open the possibility to pass in -# configuration information in the constructors in the future. - -from . import base - -if base.task_classes is None: - import sys - import re - import pkgutil - import inspect - base.task_classes = dict() - tasknamepat = re.compile(r".*\.(.*)") - taskclasspat = re.compile(r"Task.*") - __path__ = pkgutil.extend_path(__path__, __name__) - for importer, modname, ispkg in pkgutil.walk_packages(path=__path__, - prefix=__name__+'.'): - # "modname" is now the name relative to this package (e.g. tasks.foo). - # Split out the "foo" part, since that is the name of the task type - # we are adding. - tasknamemat = tasknamepat.match(modname) - if tasknamemat is None: - raise RuntimeError("task submodule name error") - taskname = tasknamemat.group(1) - if taskname=="base": continue - - # import the module - __import__(modname) - # search the classes in the module for the Task class. - taskclass = None - is_class_member = lambda member: inspect.isclass(member) and \ - member.__module__ == modname - classmembers = inspect.getmembers(sys.modules[modname], - is_class_member) - for classname, classobj in classmembers: - taskclassmat = taskclasspat.match(classname) - if taskclassmat is not None: - taskclass = classobj - break - if (taskclass is None) and (taskname != "base"): - raise RuntimeError("No Task class found for task {}"\ - .format(taskname)) - # add the class to the dictionary. - base.task_classes[taskname] = taskclass() - base.default_task_chain = ["preproc", "psf", "psfnight", "traceshift", - "extract", "fiberflat", "fiberflatnight", "sky", "starfit", - "fluxcalib", "cframe", "spectra", "redshift"] diff --git a/deprecated/py/desispec/pipeline/tasks/base.py b/deprecated/py/desispec/pipeline/tasks/base.py deleted file mode 100644 index 8f3caaa46..000000000 --- a/deprecated/py/desispec/pipeline/tasks/base.py +++ /dev/null @@ -1,623 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.base -============================ - -Common operations for pipeline tasks. -""" - -from __future__ import absolute_import, division, print_function - -import sys -import os -import re -import time -import socket -import traceback -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from desiutil.log import get_logger - -task_classes = None - -default_task_chain = None - - -def task_type(name): - """Given a task name, find the type from the list of available ones. - - Args: - name (str): the name of the task. - - Returns: - str: the type of the task. - - """ - global task_classes - avail = list(task_classes.keys()) - - # We sort by string length, so that shorter types contained in the name - # of longer ones do not match first. 
- savail = list(sorted(avail, key=len)[::-1]) - - tt = None - for av in savail: - if re.search(av, name) is not None: - tt = av - break - return tt - - -# This class is named "BaseTask", not "TaskBase" to avoid regex matching with -# the automatic loading found in _taskclass.py. - -class BaseTask(object): - """Base class for tasks. - - This defines the interfaces for the classes representing pipeline tasks. - This class should not be instantiated directly. - - """ - def __init__(self): - self._type = "base" - self._cols = [] # database columns - self._coltypes = [] - self._name_fields = [] # name fields. note that name fields have to be included in cols - self._name_formats = [] # name field formats - - def _name_split(self, name): - fields = name.split(task_name_sep) - if (len(fields) != len(self._name_fields)+1) or (fields[0] != self._type): - raise RuntimeError("name \"{}\" not valid for a {}".format(name,self._type)) - ret = dict() - for i,k in enumerate(self._name_fields) : - # first part of the name is the type, like fibermap-YYYYMMDD-EXPID - if re.match(r".*d.*", self._name_formats[i]) is not None: - # This is an integer field - ret[k] = int(fields[i+1]) - else: - ret[k] = fields[i+1] - return ret - - - def name_split(self, name): - """Split a task name into its properties. - - Args: - name (str): the task name. - - Returns: - dict: dictionary of properties. - - """ - return self._name_split(name) - - - def _name_join(self, props): - ret=self._type - for field,fieldformat in zip(self._name_fields,self._name_formats) : - ret += format(task_name_sep) - ret += format(props[field], fieldformat) - return ret - - - def name_join(self, props): - """Construct a task name from its properties. - - Args: - props (dict): dictionary of properties. - - Returns: - str: the task name. - - """ - return self._name_join(props) - - - def _paths(self, name): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return None - - - def paths(self, name): - """The filesystem path(s) associated with this task. - - Args: - name (str): the task name. - - Returns: - list: the list of output files generated by this task. - - """ - return self._paths(name) - - - def _create(self, db): - """See BaseTask.create. - """ - with db.cursor() as cur: - createstr = "create table {} (name text unique".format(self._type) - for col in zip(self._cols, self._coltypes): - createstr = "{}, {} {}".format(createstr, col[0], col[1]) - createstr = "{}, submitted integer)".format(createstr) - cur.execute(createstr) - return - - - def create(self, db): - """Initialize a database for this task type. - - This may include creating one or more tables. - - Args: - db (pipeline.DB): the database instance. - - """ - self._create(db) - return - - - def _insert(self, cursor, props): - """See BaseTask.insert. 
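Task names are the type plus each name field rendered with its format and joined by a separator; _name_split reverses this, casting any field whose format contains a "d" back to int. A standalone round trip, assuming the separator is "-" as in the fibermap-YYYYMMDD-EXPID example above:

    import re

    TASK_NAME_SEP = "-"  # assumed, matching the fibermap-YYYYMMDD-EXPID comment

    FIELDS = ["night", "band", "spec", "expid"]   # e.g. the cframe task
    FORMATS = ["08d", "s", "d", "08d"]

    def name_join(tasktype, props):
        parts = [tasktype] + [format(props[f], fmt)
                              for f, fmt in zip(FIELDS, FORMATS)]
        return TASK_NAME_SEP.join(parts)

    def name_split(tasktype, name):
        fields = name.split(TASK_NAME_SEP)
        assert fields[0] == tasktype and len(fields) == len(FIELDS) + 1
        props = {}
        for value, field, fmt in zip(fields[1:], FIELDS, FORMATS):
            # integer formats round-trip back to int
            props[field] = int(value) if re.search(r"d", fmt) else value
        return props

    name = name_join("cframe", {"night": 20241016, "band": "b",
                                "spec": 3, "expid": 1234})
    assert name == "cframe-20241016-b-3-00001234"
    assert name_split("cframe", name)["expid"] == 1234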
- """ - log = get_logger() - - name = self.name_join(props) - colstr = '(name' - valstr = "('{}'".format(name) - - #cmd='insert or replace into {} values ("{}"'.format(self._type, name) - for k, ktype in zip(self._cols, self._coltypes): - colstr += ', {}'.format(k) - if k == "state": - if k in props: - valstr += ', {}'.format(task_state_to_int[props["state"]]) - else: - valstr += ', {}'.format(task_state_to_int["waiting"]) - else: - if ktype == "text": - valstr += ", '{}'".format(props[k]) - else: - valstr += ', {}'.format(props[k]) - colstr += ', submitted)' - valstr += ', 0)' - - cmd = 'insert into {} {} values {}'.format(self._type, colstr, valstr) - log.debug(cmd) - cursor.execute(cmd) - return - - - def insert(self, cursor, props): - """Insert a task into a database. - - This uses the name and extra keywords to update one or more - task-specific tables. - - Args: - cursor (DB cursor): the database cursor of an open connection. - props (dict): dictionary of properties for the task. - - """ - - log = get_logger() - log.debug("inserting {}".format(self.name_join(props))) - - self._insert(cursor, props) - return - - - def _retrieve(self, db, name): - """See BaseTask.retrieve. - """ - ret = dict() - with db.cursor() as cur: - cur.execute(\ - "select * from {} where name = '{}'".format(self._type,name)) - row = cur.fetchone() - if row is None: - raise RuntimeError("task {} not in database".format(name)) - ret["name"] = name - for i,k in enumerate(self._cols[1:]) : - if k == "state" : - ret[k] = task_int_to_state(row[i]) - else : - ret[k] = row[i] - return ret - - - def retrieve(self, db, name): - """Retrieve all task information from the DB. - - This may include additional information beyond the contents of the - task name (e.g. from other tables). - - Args: - db (pipeline.DB): the database instance. - name (str): the task name. - - Returns: - dict: dictionary of properties for the task. - - """ - return self._retrieve(db, name) - - - def _state_set(self, db, name, state, cur=None): - """See BaseTask.state_set. - """ - start = time.time() - - cmd="update {} set state = {} where name = '{}'"\ - .format(self._type, task_state_to_int[state], name) - - if cur is None : - with db.cursor() as cur: - cur.execute(cmd) - else : - cur.execute(cmd) - - stop = time.time() - log = get_logger() - log.debug("took {:.3f} sec for {} {}".format(stop-start,name, state)) - return - - - def _state_get(self, db, name, cur=None): - """See BaseTask.state_get. - """ - - st = None - cmd = "select state from {} where name = '{}'"\ - .format(self._type,name) - if cur is None : - with db.cursor() as cur: - cur.execute(cmd) - row = cur.fetchone() - else : - cur.execute(cmd) - row = cur.fetchone() - - if row is None: - raise RuntimeError("task {} not in database".format(name)) - st = task_int_to_state[row[0]] - return st - - - def state_set(self, db, name, state, cur=None): - """Set the state of a task. - - This should not be called repeatedly if you are setting the state of - many tasks. It is more efficient to do that in a single SQL command. - - Args: - db (pipeline.DB): the database instance. - name (str): the task name. - - """ - self._state_set(db, name, state, cur) - return - - - def state_get(self, db, name, cur=None): - """Get the state of a task. - - This should not be called repeatedly for many tasks- it is more - efficient to get the state of many tasks in a single custom SQL query. - - Args: - db (pipeline.DB): the database instance. - name (str): the task name. - - Returns: - str: the state. 
- - """ - return self._state_get(db, name, cur) - - - def _deps(self, name, db, inputs): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return None - - - def deps(self, name, db=None, inputs=None): - """Get the dependencies for a task. - - This gets a list of other tasks which are required. - - Args: - name (str): the task name. - db (pipeline.DB): the optional database instance. - inputs (dict): optional dictionary containing the only input - dependencies that should be considered. - - Returns: - dict: a dictionary of dependencies. The keys are arbitrary and - the values can be either scalar task names or lists of tasks. - - """ - if (db is not None) and (inputs is not None): - raise RuntimeError("Cannot specify both a DB and an input dict") - return self._deps(name, db, inputs) - - - def _run_max_procs(self): - return 0 - - - def run_max_procs(self): - """Maximum number of processes supported by this task type. - - Args: - procs_per_node (int): the number of processes running per node. - - Returns: - int: the maximum number of processes. Zero indicates no limit. - - """ - return self._run_max_procs() - - - def _run_max_mem_proc(self, name, db): - """Return zero (i.e. not a limit) - """ - return 0.0 - - - def run_max_mem_proc(self, name, db=None): - """Maximum memory in GB per process required. - - If zero is returned, it indicates that the memory requirement is so - small that the code can run fully-packed on any system. - - Args: - name (str): the name of the task. - db (pipeline.DB): the optional database instance. - - Returns: - float: the required RAM in GB per process. - - """ - return self._run_max_mem_proc(name, db) - - - def _run_max_mem_task(self, name, db): - """Return zero (i.e. no memory requirement) - """ - return 0.0 - - - def run_max_mem_task(self, name, db=None): - """Maximum memory in GB per task required. - - If zero is returned, it indicates that the memory requirement is so - small that the code can run on a single node. - - Args: - name (str): the name of the task. - db (pipeline.DB): the optional database instance. - - Returns: - float: the required RAM in GB per process. - - """ - return self._run_max_mem_task(name, db) - - - def _run_time(self, name, procs, db): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return None - - - def run_time(self, name, procs, db=None): - """Estimated runtime for a task at maximum concurrency. - - Args: - name (str): the name of the task. - procs (int): the total number of processes used for this task. - db (pipeline.DB): the optional database instance. - - Returns: - int: estimated minutes of run time. - - """ - return self._run_time(name, procs, db) - - - def _run_defaults(self): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return None - - - def run_defaults(self): - """Default options. - - This dictionary of default options will be written to the options.yaml - file in a production directory. The options will then be loaded from - that file at run time. - - Changes to this function will only impact newly-created productions, - and these options will be overridden by any changes to the options.yaml - file. - - Returns: - dict: dictionary of default options. 
- - """ - return self._run_defaults() - - - def _run_cli(self, name, opts, procs, db): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return None - - - def run_cli(self, name, opts, procs, launch=None, log=None, db=None): - """Return the equivalent command-line interface. - - Args: - name (str): the name of the task. - opts (dict): dictionary of runtime options. - procs (int): The number of processes to use. - launch (str): optional launching command. - log (str): optional log file for output. - db (pipeline.db.DB): The database. - - Returns: - str: a command line. - - """ - comstr = self._run_cli(name, opts, procs, db) - if launch is not None: - comstr = "{} {} {}".format(launch, procs, comstr) - if log is not None: - comstr = "{} >{} 2>&1".format(comstr, log) - return comstr - - - def _run(self, name, opts, comm, db): - raise NotImplementedError("You should not use a BaseTask object " - " directly") - return - - - def run(self, name, opts, comm=None, db=None): - """Run the task. - - Args: - name (str): the name of this task. - opts (dict): options to use for this task. - comm (mpi4py.MPI.Comm): optional MPI communicator. - db (pipeline.db.DB): The database. - - Returns: - int: the number of processes that failed. - - """ - log = get_logger() - - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - # at info level, write out the equivalent commandline that was used - if rank == 0: - start_time = time.time() - lstr = "(run by pipeline with {} procs)".format(nproc) - com = self.run_cli(name, opts, nproc, db=db) - log.info("{}: {}".format(lstr, com)) - - hostname = socket.gethostname() - log.info("Starting {} at {} on {}".format( - name, time.asctime(), hostname)) - - if 'SLURM_JOB_ID' in os.environ: - jobid = os.getenv('SLURM_JOB_ID') - log.info('slurm job id {}'.format(jobid)) - - failed = 0 - try: - self._run(name, opts, comm, db) - except: - msg = "FAILED: task {} process {}".format(name, rank) - log.error(msg) - exc_type, exc_value, exc_traceback = sys.exc_info() - lines = traceback.format_exception(exc_type, exc_value, - exc_traceback) - log.error("".join(lines)) - failed = 1 - - failcount = 0 - if comm is None: - failcount = failed - else: - failcount = comm.allreduce(failed) - - if failcount > 0: - if rank == 0: - log.error("{} of {} processes raised an exception"\ - .format(failcount, nproc)) - - if rank == 0: - runtime = (time.time() - start_time) / 60 - log.info("Finished {} at {} ({:.1f} min)".format( - name, time.asctime(), runtime)) - - return failcount - - def getready(self, db, name, cur): - """Checks whether dependencies are ready""" - log = get_logger() - deps = self.deps(name, db=db, inputs=None) - ready = True - for dep in deps.values() : - # for each dependency, guess its type - deptype = dep.split(task_name_sep)[0] - # based on the type and dependency name, read state from db - depstate = task_classes[deptype].state_get(db=db,name=dep,cur=cur) - ready &= (depstate=="done") # ready if all dependencies are done - if ready : - # change state to ready - log.debug("{} is ready to run".format(name)) - self.state_set(db=db,name=name,state="ready",cur=cur) - - - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - pass - - - - def run_and_update(self, db, name, opts, comm=None): - """Run the task and update DB state. - - The state of the task is marked as "done" if the command completes - without raising an exception and if the output files exist. 
- - Args: - db (pipeline.db.DB): The database. - name (str): the name of this task. - opts (dict): options to use for this task. - comm (mpi4py.MPI.Comm): optional MPI communicator. - - Returns: - int: the number of processes that failed. - - """ - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - failed = self.run(name, opts, comm=comm, db=db) - - if rank == 0: - if failed > 0: - self.state_set(db, name, "failed") - else: - outputs = self.paths(name) - done = True - for out in outputs: - if not os.path.isfile(out): - done = False - failed = nproc - break - if done: - self.state_set(db, name, "done") - # post processing is now done by a single rank in run.run_task_list - else: - self.state_set(db, name, "failed") - return failed diff --git a/deprecated/py/desispec/pipeline/tasks/cframe.py b/deprecated/py/desispec/pipeline/tasks/cframe.py deleted file mode 100644 index e36a3e053..000000000 --- a/deprecated/py/desispec/pipeline/tasks/cframe.py +++ /dev/null @@ -1,147 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.cframe -============================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskCFrame(BaseTask): - """Class containing the properties of a sky fit task. - """ - def __init__(self): - super(TaskCFrame, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "cframe" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("cframe", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "infile" : task_classes["extract"].name_join(props), - "fiberflat" : task_classes["fiberflatnight"].name_join(props), - "sky" : task_classes["sky"].name_join(props), - "calib" : task_classes["fluxcalib"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - """See BaseTask.run_max_procs. - """ - return 1 - - - def _run_time(self, name, procs, db): - """See BaseTask.run_time. - """ - return 2 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - #opts["sky-throughput-correction"] = True - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. 
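run_and_update marks a task "done" only when the run returned cleanly and every expected output file exists; anything else becomes "failed". The decision, isolated:

    import os

    def final_state(failcount, outputs):
        """State to record after a task: every output must exist."""
        if failcount > 0:
            return "failed"
        if all(os.path.isfile(p) for p in outputs):
            return "done"
        return "failed"  # clean exit but missing outputs still fails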
- """ - from .base import task_classes, task_type - - deps = self.deps(name) - options = {} - options["infile"] = task_classes["extract"].paths(deps["infile"])[0] - options["fiberflat"] = task_classes["fiberflatnight"].paths(deps["fiberflat"])[0] - options["sky"] = task_classes["sky"].paths(deps["sky"])[0] - options["calib"] = task_classes["fluxcalib"].paths(deps["calib"])[0] - options["outfile"] = self.paths(name)[0] - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_process_exposure" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import procexp - optlist = self._option_list(name, opts) - args = procexp.parse(optlist) - procexp.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - props=self.name_split(name) - props["state"]=0 # selection - db.update_healpix_frame_state(props,state=1,cur=cur) # 1=has a cframe - - log = get_logger() - # call getready on all spectra ... might be super inefficient - tt="spectra" - required_healpix_frame_state = 1 # means we have a cframe - cur.execute('select nside,pixel from healpix_frame where state = {} and expid = {} and spec = {}'.format(required_healpix_frame_state,props["expid"],props["spec"])) - entries = cur.fetchall() - log.debug("from {} set spectra to ready : {}".format(name,entries)) - for entry in entries : - cur.execute('update {} set state = {} where nside = {} and pixel = {}'.format(tt,task_state_to_int["ready"],entry[0],entry[1])) diff --git a/deprecated/py/desispec/pipeline/tasks/extract.py b/deprecated/py/desispec/pipeline/tasks/extract.py deleted file mode 100644 index 56e532691..000000000 --- a/deprecated/py/desispec/pipeline/tasks/extract.py +++ /dev/null @@ -1,162 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.extract -=============================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskExtract(BaseTask): - """Class containing the properties of one extraction task. - """ - def __init__(self): - super(TaskExtract, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "extract" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("frame", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. 
- """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "input" : task_classes["preproc"].name_join(props), - "fibermap" : task_classes["fibermap"].name_join(props), - "psf" : task_classes["traceshift"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # 20 bundles per camera - return 20 - - - def _run_time(self, name, procs, db): - # Time when running on max procs on machine with scale - # factor 1.0 - mprc = self._run_max_procs() - return (7.0 / procs) * mprc - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - opts["regularize"] = 0.0 - opts["nwavestep"] = 50 - opts["verbose"] = False - opts["heliocentric_correction"] = False - opts["wavelength_b"] = "3579.0,5939.0,0.8" - opts["wavelength_r"] = "5635.0,7731.0,0.8" - opts["wavelength_z"] = "7445.0,9824.0,0.8" - opts["psferr"] = 0.05 - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - deps = self.deps(name) - options = {} - options["input"] = task_classes["preproc"].paths(deps["input"])[0] - options["fibermap"] = task_classes["fibermap"].paths(deps["fibermap"])[0] - options["psf"] = task_classes["traceshift"].paths(deps["psf"])[0] - options["output"] = self.paths(name)[0] - - # extract the wavelength range from the options, depending on the band - props = self.name_split(name) - optscopy = copy.deepcopy(opts) - wkey = "wavelength_{}".format(props["band"]) - wave = optscopy[wkey] - del optscopy["wavelength_b"] - del optscopy["wavelength_r"] - del optscopy["wavelength_z"] - optscopy["wavelength"] = wave - - options.update(optscopy) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_extract_spectra" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. 
- """ - from ...scripts import extract - optlist = self._option_list(name, opts) - args = extract.parse(optlist) - if comm is None : - extract.main(args) - else : - extract.main_mpi(args, comm=comm) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready for all extraction with same night,band,spec - props = self.name_split(name) - log = get_logger() - for tt in ["fiberflat","sky"] : - cmd = "select name from {} where night={} and expid={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["expid"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {} {}".format(tt,tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/fiberflat.py b/deprecated/py/desispec/pipeline/tasks/fiberflat.py deleted file mode 100644 index c395bc8f2..000000000 --- a/deprecated/py/desispec/pipeline/tasks/fiberflat.py +++ /dev/null @@ -1,133 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.fiberflat -================================= - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskFiberflat(BaseTask): - """Class containing the properties of one extraction task. - """ - def __init__(self): - super(TaskFiberflat, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "fiberflat" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("fiberflat", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "infile" : task_classes["extract"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 3 - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - return opts - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. 
- """ - from .base import task_classes, task_type - - options = OrderedDict() - - deps = self.deps(name) - options = {} - options["infile"] = task_classes["extract"].paths(deps["infile"])[0] - options["outfile"] = self.paths(name)[0] - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_compute_fiberflat" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import fiberflat - optlist = self._option_list(name, opts) - args = fiberflat.parse(optlist) - fiberflat.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all fierflatnight with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt="fiberflatnight" - cmd = "select name from {} where night={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/fiberflatnight.py b/deprecated/py/desispec/pipeline/tasks/fiberflatnight.py deleted file mode 100644 index fcdeaf6df..000000000 --- a/deprecated/py/desispec/pipeline/tasks/fiberflatnight.py +++ /dev/null @@ -1,152 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.fiberflatnight -====================================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import numpy as np - -import sys,re,os,glob - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskFiberflatNight(BaseTask): - """Class containing the properties of one fiberflat combined night task. - """ - def __init__(self): - super(TaskFiberflatNight, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "fiberflatnight" - self._cols = [ - "night", - "band", - "spec", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec"] - self._name_formats = ["08d","s","d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("fiberflatnight", night=props["night"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - return dict() - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 1 - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return {} - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. 
- """ - from .base import task_classes, task_type - - options = OrderedDict() - options["outfile"] = self.paths(name)[0] - - # look for psf for this night on disk - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - dummy_expid = 99999999 - template_input = findfile("fiberflat", night=props["night"], expid=dummy_expid, - camera=camera, - band=props["band"], - spectrograph=props["spec"]) - template_input = template_input.replace("{:08d}".format(dummy_expid),"*") - options["infile"] = glob.glob(template_input) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - return "desi_average_fiberflat {}".format(self._option_list(name, opts)) - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import average_fiberflat - optlist = self._option_list(name, opts) - args = average_fiberflat.parse(optlist) - average_fiberflat.main(args) - - return - - def getready(self, db, name, cur): - """Checks whether dependencies are ready""" - log = get_logger() - - # look for the state of psf with same night,band,spectro - props = self.name_split(name) - - cmd = "select state from fiberflat where night={} and band='{}' and spec={}".format(props["night"],props["band"],props["spec"]) - cur.execute(cmd) - states = np.array([ x for (x,) in cur.fetchall() ]) - log.debug("states={}".format(states)) - - # fiberflatnight ready if all fiberflat from the night have been processed, and at least one is done (failures are allowed) - n_done = np.sum(states==task_state_to_int["done"]) - n_failed = np.sum(states==task_state_to_int["failed"]) - - ready = (n_done > 0) & ( (n_done + n_failed) == states.size ) - if ready : - self.state_set(db=db,name=name,state="ready",cur=cur) - - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready for all sky with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt = "sky" - cmd = "select name from {} where night={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/fibermap.py b/deprecated/py/desispec/pipeline/tasks/fibermap.py deleted file mode 100644 index 89ac18d28..000000000 --- a/deprecated/py/desispec/pipeline/tasks/fibermap.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.fibermap -================================ - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import BaseTask - - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskFibermap(BaseTask): - """Class containing the properties of one fibermap. - - Since fibermaps have no dependencies and are not created by the pipeline, - this class is just used to specify names, etc. 
- - """ - def __init__(self): - # do that first - super(TaskFibermap, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "fibermap" - self._cols = [ - "night", - "expid", - "flavor", - "state" - ] - self._coltypes = [ - "integer", - "integer", - "text", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","expid"] - self._name_formats = ["08d","08d"] - - - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - return [ findfile("fibermap", night=props["night"], - expid=props["expid"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - return dict() - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 1 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return dict() - - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - return "" - - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - return diff --git a/deprecated/py/desispec/pipeline/tasks/fluxcalib.py b/deprecated/py/desispec/pipeline/tasks/fluxcalib.py deleted file mode 100644 index c38de70d4..000000000 --- a/deprecated/py/desispec/pipeline/tasks/fluxcalib.py +++ /dev/null @@ -1,139 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.fluxcalib -================================= - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskFluxCalib(BaseTask): - """Class containing the properties of a sky fit task. - """ - def __init__(self): - super(TaskFluxCalib, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "fluxcalib" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("calib", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "infile" : task_classes["extract"].name_join(props), - "fiberflat" : task_classes["fiberflatnight"].name_join(props), - "sky" : task_classes["sky"].name_join(props), - "models" : task_classes["starfit"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 3 - - def _run_defaults(self): - """See BaseTask.run_defaults. 
- """ - opts = {} - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - deps = self.deps(name) - options = {} - options["infile"] = task_classes["extract"].paths(deps["infile"])[0] - options["fiberflat"] = task_classes["fiberflatnight"].paths(deps["fiberflat"])[0] - options["sky"] = task_classes["sky"].paths(deps["sky"])[0] - options["models"] = task_classes["starfit"].paths(deps["models"])[0] - options["outfile"] = self.paths(name)[0] - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_compute_fluxcalibration" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import fluxcalibration - optlist = self._option_list(name, opts) - args = fluxcalibration.parse(optlist) - fluxcalibration.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all fierflatnight with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt="cframe" - cmd = "select name from {} where night={} and expid={} and spec={} and band='{}' and state=0".format(tt,props["night"],props["expid"],props["spec"],props["band"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/preproc.py b/deprecated/py/desispec/pipeline/tasks/preproc.py deleted file mode 100644 index dbd69915f..000000000 --- a/deprecated/py/desispec/pipeline/tasks/preproc.py +++ /dev/null @@ -1,162 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.preproc -=============================== - -""" - -from __future__ import absolute_import, division, print_function - -import os -import re - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import numpy as np - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskPreproc(BaseTask): - """Class containing the properties of one preprocessed pixel file. - """ - def __init__(self): - # do that first - super(TaskPreproc, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "preproc" - self._cols = [ - "night", - "band", - "spec", - "expid", - "flavor", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "text", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - - def _paths(self, name): - """See BaseTask.paths. 
- """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("preproc", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "fibermap" : task_classes["fibermap"].name_join(props), - "rawdata" : task_classes["rawdata"].name_join(props) - } - return deptasks - - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 3.0 - - def _run_max_mem_proc(self, name, db): - # Per-process memory requirements - return 0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return dict() - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - dp = self.deps(name) - - options = OrderedDict() - options.update(opts) - - props = self.name_split(name) - options["infile"] = task_classes["rawdata"].paths(dp["rawdata"])[0] - options["cameras"] = "{}{}".format(props["band"],props["spec"]) - - outfile = self.paths(name)[0] - options["outfile"] = outfile - - return option_list(options) - - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_preproc" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import preproc - optlist = self._option_list(name, opts) - args = preproc.parse(optlist) - preproc.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready for all extraction with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt = "psf" - cmd = "select name from {} where night={} and band='{}' and spec={} and expid={} and state=0".format(tt,props["night"],props["band"],props["spec"],props["expid"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks: - task_classes[tt].getready(db=db, name=task, cur=cur) - tt = "traceshift" - cmd = "select name from {} where night={} and band='{}' and spec={} and expid={} and state=0".format(tt,props["night"],props["band"],props["spec"],props["expid"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks: - task_classes[tt].getready(db=db, name=task, cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/psf.py b/deprecated/py/desispec/pipeline/tasks/psf.py deleted file mode 100644 index eae1e2be4..000000000 --- a/deprecated/py/desispec/pipeline/tasks/psf.py +++ /dev/null @@ -1,169 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.psf -=========================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - 
-import sys,re,os - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskPSF(BaseTask): - """Class containing the properties of one PSF task. - """ - def __init__(self): - super(TaskPSF, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "psf" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("psf", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "input-image" : task_classes["preproc"].name_join(props) - } - return deptasks - - - def _run_max_procs(self): - # 20 bundles per camera - return 20 - - - def _run_time(self, name, procs, db): - # Time when running on max procs on machine with scale - # factor 1.0 - mprc = self._run_max_procs() - return (20.0 / procs) * mprc - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - opts["trace-deg-wave"] = 7 - opts["trace-deg-x"] = 7 - opts["trace-prior-deg"] = 4 - - envname="DESI_SPECTRO_CALIB" - if not envname in os.environ : - raise KeyError("need to set DESI_SPECTRO_CALIB env. variable") - - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - options = OrderedDict() - - deps = self.deps(name) - props = self.name_split(name) - - # make a copy, so we can remove some entries - opts_copy = opts.copy() - - options["input-image"] = task_classes["preproc"].paths(deps["input-image"])[0] - options["output-psf"] = self.paths(name) - - if "specmin" in opts_copy: - options["specmin"] = opts_copy["specmin"] - del opts_copy["specmin"] - - if "nspec" in opts_copy: - options["nspec"] = opts_copy["nspec"] - del opts_copy["nspec"] - - if len(opts_copy) > 0: - extarray = option_list(opts_copy) - options["extra"] = " ".join(extarray) - - return option_list(options) - - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - - entry = "desi_compute_psf" - if procs > 1: - entry = "desi_compute_psf_mpi" - return "{} {}".format(entry, " ".join(self._option_list(name, opts))) - - - def _run(self, name, opts, comm, db): - """See BaseTask.run. 
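TaskPSF promotes only specmin and nspec to first-class options and folds any remaining runtime options into a single "extra" string passed through to specex. Isolated, with option_list stubbed to a plausible --key value rendering (the real helper is desispec.util.option_list):

    def option_list(opts):
        # stand-in for desispec.util.option_list; rendering is assumed
        out = []
        for k, v in opts.items():
            out.append("--{}".format(k))
            out.append(str(v))
        return out

    def psf_options(opts):
        opts = dict(opts)  # copy, so we can pop entries
        options = {}
        for key in ("specmin", "nspec"):
            if key in opts:
                options[key] = opts.pop(key)
        if opts:
            options["extra"] = " ".join(option_list(opts))
        return options

    print(psf_options({"nspec": 50, "trace-deg-wave": 7}))
    # -> {'nspec': 50, 'extra': '--trace-deg-wave 7'}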
- """ - from ...scripts import specex - optlist = self._option_list(name, opts) - - args = specex.parse(optlist) - specex.main(args, comm=comm) - return - - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all psfnight with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt="psfnight" - cmd = "select name from {} where night={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/psfnight.py b/deprecated/py/desispec/pipeline/tasks/psfnight.py deleted file mode 100644 index f11968a25..000000000 --- a/deprecated/py/desispec/pipeline/tasks/psfnight.py +++ /dev/null @@ -1,161 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.psfnight -================================ - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,glob - -import numpy as np - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskPSFNight(BaseTask): - """Class containing the properties of one PSF combined night task. - """ - def __init__(self): - super(TaskPSFNight, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "psfnight" - self._cols = [ - "night", - "band", - "spec", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec"] - self._name_formats = ["08d","s","d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("psfnight", night=props["night"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - return dict() - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 2.0 - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return {} - - def _option_dict(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. 
- """ - from .base import task_classes, task_type - - options = OrderedDict() - options["output"] = self.paths(name)[0] - - # look for psf for this night on disk - options["input"] = [] - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - dummy_expid = 99999999 - template_input = findfile("psf", night=props["night"], expid=dummy_expid, - camera=camera, - band=props["band"], - spectrograph=props["spec"]) - template_input = template_input.replace("{:08d}".format(dummy_expid),"????????") - options["input"] = glob.glob(template_input) - return options - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - return option_list(self._option_dict(name,opts)) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - optlist = self._option_list(name, opts) - com = "# command line for psfnight not implemented" - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import specex - optdict = self._option_dict(name, opts) - specex.mean_psf(optdict["input"], optdict["output"]) - - return - - def getready(self, db, name, cur): - """Checks whether dependencies are ready""" - log = get_logger() - - # look for the state of psf with same night,band,spectro - props = self.name_split(name) - - cmd = "select state from psf where night={} and band='{}' and spec={}".format(props["night"],props["band"],props["spec"]) - cur.execute(cmd) - states = np.array([ x for (x,) in cur.fetchall() ]) - log.debug("states={}".format(states)) - - # psfnight ready if all psf from the night have been processed, and at least one is done (failures are allowed) - n_done = np.sum(states==task_state_to_int["done"]) - n_failed = np.sum(states==task_state_to_int["failed"]) - - ready = (n_done > 0) & ( (n_done + n_failed) == states.size ) - if ready : - self.state_set(db=db,name=name,state="ready",cur=cur) - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready for all extraction with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt = "traceshift" - cmd = "select name from {} where night={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/qadata.py b/deprecated/py/desispec/pipeline/tasks/qadata.py deleted file mode 100644 index 64c2332b6..000000000 --- a/deprecated/py/desispec/pipeline/tasks/qadata.py +++ /dev/null @@ -1,125 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.qadata -============================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskQAData(BaseTask): - """Class containing the properties of a sky fit task. 
- """ - def __init__(self): - super(TaskQAData, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "qadata" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("qa_data", night=props["night"], expid=props["expid"], - camera=camera) ] # Add qaprod_dir here to have QA land somewhere else? - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "cframe" : task_classes["cframe"].name_join(props), - } - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 2.0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - props = self.name_split(name) - options = {} - options["frame_file"] = task_classes["extract"].paths( - task_classes["extract"].name_join(props)) - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_qa_frame" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import qa_frame - optlist = self._option_list(name, opts) - args = qa_frame.parse(optlist) - qa_frame.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all fierflatnight with same night,band,spec - pass diff --git a/deprecated/py/desispec/pipeline/tasks/rawdata.py b/deprecated/py/desispec/pipeline/tasks/rawdata.py deleted file mode 100644 index a260185f4..000000000 --- a/deprecated/py/desispec/pipeline/tasks/rawdata.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.rawdata -=============================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import BaseTask - - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskRawdata(BaseTask): - """Class containing the properties of one rawdata. - - Since rawdatas have no dependencies and are not created by the pipeline, - this class is just used to specify names, etc. 
- - """ - def __init__(self): - # do that first - super(TaskRawdata, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "rawdata" - self._cols = [ - "night", - "expid", - "flavor", - "state" - ] - self._coltypes = [ - "integer", - "integer", - "text", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","expid"] - self._name_formats = ["d","08d"] - - - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - return [ findfile("raw", night=props["night"], - expid=props["expid"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - return dict() - - def _run_max_procs(self): - # This is a fake task. - return 1 - - def _run_time(self, name, procs, db): - # Fake task - return 0.0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return dict() - - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - return "" - - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - return diff --git a/deprecated/py/desispec/pipeline/tasks/redshift.py b/deprecated/py/desispec/pipeline/tasks/redshift.py deleted file mode 100644 index 9534027ed..000000000 --- a/deprecated/py/desispec/pipeline/tasks/redshift.py +++ /dev/null @@ -1,207 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.redshift -================================ - -""" - -from __future__ import absolute_import, division, print_function - -import numpy as np - -from .base import BaseTask, task_classes, task_type -from ...io import findfile -from ...util import option_list -from redrock.external.desi import rrdesi -from desiutil.log import get_logger - -import os - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskRedshift(BaseTask): - """Class containing the properties of one spectra task. - """ - def __init__(self): - super(TaskRedshift, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "redshift" - self._cols = [ - "nside", - "pixel", - "state" - ] - self._coltypes = [ - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["nside","pixel"] - self._name_formats = ["d","d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - hpix = props["pixel"] - nside = props["nside"] - redrock = findfile("redrock", groupname=hpix, nside=nside) - rrdetails = findfile("rrdetails", groupname=hpix, nside=nside) - return [redrock, rrdetails] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - props = self.name_split(name) - deptasks = { - "infile" : task_classes["spectra"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # Redshifts can run on any number of procs. - return 0 - - def _run_time(self, name, procs, db): - # Run time on one task on machine with scale factor == 1.0. - # This should depend on the total number of unique targets, which is - # not known a priori. Instead, we compute the total targets and reduce - # this by some factor. 
- if db is not None: - props = self.name_split(name) - entries = db.select_healpix_frame( - {"pixel":props["pixel"], - "nside":props["nside"]} - ) - ntarget = np.sum([x["ntargets"] for x in entries]) - neff = 0.3 * ntarget - # 2.5 seconds per targets - tm = 1 + 2.5 * 0.0167 * neff - else: - tm = 60 - - return tm - - def _run_max_mem_proc(self, name, db): - # Per-process memory requirements. This is determined by the largest - # Spectra file that must be read and broadcast. We compute that size - # assuming no coadd and using the total number of targets falling in - # our pixel. - mem = 0.0 - if db is not None: - props = self.name_split(name) - entries = db.select_healpix_frame( - {"pixel":props["pixel"], - "nside":props["nside"]} - ) - ntarget = np.sum([x["ntargets"] for x in entries]) - # DB entry is for one exposure and spectrograph. - mem = 0.2 + 0.0002 * 3 * ntarget - return mem - - def _run_max_mem_task(self, name, db): - # This returns the total aggregate memory needed for the task, - # which should be based on the larger of: - # 1) the total number of unique (coadded) targets. - # 2) the largest spectra file times the number of processes - # Since it is not easy to calculate (1), and the constraint for (2) - # is already encapsulated in the per-process memory requirements, - # we return zero here. This effectively selects one node. - mem = 0.0 - return mem - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - return {'no-mpi-abort': True} - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - - redrockfile, rrdetailsfile = self.paths(name) - outdir = os.path.dirname(redrockfile) - - options = {} - options["details"] = rrdetailsfile - options["outfile"] = redrockfile - options.update(opts) - - optarray = option_list(options) - - deps = self.deps(name) - specfile = task_classes["spectra"].paths(deps["infile"])[0] - optarray.append(specfile) - - return optarray - - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "rrdesi_mpi" - optlist = self._option_list(name, opts) - return "{} {}".format(entry, " ".join(optlist)) - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - optlist = self._option_list(name, opts) - rrdesi(options=optlist, comm=comm) - return - - def run_and_update(self, db, name, opts, comm=None): - """Run the task and update DB state. - - The state of the task is marked as "done" if the command completes - without raising an exception and if the output files exist. - - It is specific for redshift because the healpix_frame table has to be updated - - Args: - db (pipeline.db.DB): The database. - name (str): the name of this task. - opts (dict): options to use for this task. - comm (mpi4py.MPI.Comm): optional MPI communicator. - - Returns: - int: the number of processes that failed. 
- - """ - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - failed = self.run(name, opts, comm=comm, db=db) - - if rank == 0: - if failed > 0: - self.state_set(db, name, "failed") - else: - outputs = self.paths(name) - done = True - for out in outputs: - if not os.path.isfile(out): - done = False - failed = nproc - break - if done: - props=self.name_split(name) - props["state"]=2 # selection, only those for which we had already updated the spectra - with db.cursor() as cur : - self.state_set(db, name, "done",cur=cur) - db.update_healpix_frame_state(props,state=3,cur=cur) # 3=redshifts have been updated - else: - self.state_set(db, name, "failed") - return failed diff --git a/deprecated/py/desispec/pipeline/tasks/sky.py b/deprecated/py/desispec/pipeline/tasks/sky.py deleted file mode 100644 index 5f9aae9e6..000000000 --- a/deprecated/py/desispec/pipeline/tasks/sky.py +++ /dev/null @@ -1,136 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.sky -=========================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskSky(BaseTask): - """Class containing the properties of a sky fit task. - """ - def __init__(self): - super(TaskSky, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "sky" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("sky", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "infile" : task_classes["extract"].name_join(props), - "fiberflat" : task_classes["fiberflatnight"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 7.0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - deps = self.deps(name) - options = {} - options["infile"] = task_classes["extract"].paths(deps["infile"])[0] - options["fiberflat"] = task_classes["fiberflatnight"].paths(deps["fiberflat"])[0] - options["outfile"] = self.paths(name)[0] - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. 
- """ - entry = "desi_compute_sky" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import sky - optlist = self._option_list(name, opts) - args = sky.parse(optlist) - sky.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all fierflatnight with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt="starfit" - cmd = "select name from {} where night={} and expid={} and spec={} and state=0".format(tt,props["night"],props["expid"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/spectra.py b/deprecated/py/desispec/pipeline/tasks/spectra.py deleted file mode 100644 index a90c0bd14..000000000 --- a/deprecated/py/desispec/pipeline/tasks/spectra.py +++ /dev/null @@ -1,209 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.spectra -=============================== - -""" - -from __future__ import absolute_import, division, print_function - -import numpy as np - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import os - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskSpectra(BaseTask): - """Class containing the properties of one spectra task. - """ - def __init__(self): - super(TaskSpectra, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "spectra" - self._cols = [ - "nside", - "pixel", - "state" - ] - self._coltypes = [ - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["nside","pixel"] - self._name_formats = ["d","d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - return [ findfile("spectra", night=None, expid=None, - camera=None, groupname=props["pixel"], nside=props["nside"], band=None, - spectrograph=None) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - return dict() - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0. - # Get the list of frames and use the size of this list as - # a proxy for determining the runtime. The run time is dominated by - # I/O. - if db is not None: - props = self.name_split(name) - entries = db.select_healpix_frame({"pixel":props["pixel"],"nside":props["nside"]}) - nentry = len(entries) - tm = 10.0 + 1.0 * (nentry / 50.0) - else: - tm = 60.0 - - return tm - - def _run_max_mem_proc(self, name, db): - # Per-process memory requirements - mem = 0.0 - if db is not None: - # Get the list of frames. The frame files touching this pixel will - # be cached in RAM. - props = self.name_split(name) - entries = db.select_healpix_frame({"pixel":props["pixel"],"nside":props["nside"]}) - nframe = len(entries) - # Each frame is about 90MB - mem = 0.090 * nframe - return mem - - - def _run_defaults(self): - """See BaseTask.run_defaults. 
- """ - return {} - - def _option_list(self, name, opts, db): - - # we do need db access for spectra - if db is None : - log = get_logger() - log.error("we do need db access for spectra") - raise RuntimeError("we do need db access for spectra") - - from .base import task_classes, task_type - # get pixel - props = self.name_split(name) - # get list of exposures and spectrographs by selecting entries in the - # healpix_frame table with state = 1, which means that there is a new - # cframe intersecting the pixel - entries = db.select_healpix_frame({"pixel":props["pixel"],"nside":props["nside"],"state":1}) - # now select cframe with same props - cframes = [] - for entry in entries : - for band in ["b","r","z"] : - entry_and_band = entry.copy() - entry_and_band["band"] = band - # this will match cframes with same expid and spectro - taskname = task_classes["cframe"].name_join(entry_and_band) - filename = task_classes["cframe"].paths(taskname)[0] - cframes.append(filename) - - options = {} - options["infiles"] = cframes - options["outfile"] = self.paths(name)[0] - options["healpix"] = props["pixel"] - options["nside"] = props["nside"] - - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_update_spectra" - optlist = self._option_list(name, opts, db) - return "{} {}".format(entry, " ".join(optlist)) - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import update_spectra - optlist = self._option_list(name, opts, db) - args = update_spectra.parse(optlist) - update_spectra.main(args) - return - - def run_and_update(self, db, name, opts, comm=None): - """Run the task and update DB state. - - The state of the task is marked as "done" if the command completes - without raising an exception and if the output files exist. - - It is specific for spectra because the healpix_frame table has to be updated - - Args: - db (pipeline.db.DB): The database. - name (str): the name of this task. - opts (dict): options to use for this task. - comm (mpi4py.MPI.Comm): optional MPI communicator. - - Returns: - int: the number of processes that failed. 
- - """ - nproc = 1 - rank = 0 - if comm is not None: - nproc = comm.size - rank = comm.rank - - failed = self.run(name, opts, comm=comm, db=db) - - if rank == 0: - if failed > 0: - self.state_set(db, name, "failed") - else: - outputs = self.paths(name) - done = True - for out in outputs: - if not os.path.isfile(out): - done = False - failed = nproc - break - if done: - props=self.name_split(name) - props["state"]=1 # selection, only those for which we had a cframe - with db.cursor() as cur : - self.state_set(db, name, "done",cur=cur) - # 2=spectra has been updated - db.update_healpix_frame_state(props,state=2,cur=cur) - # directly set the corresponding redshift to ready - cur.execute( - 'update redshift set state={} where nside = {} and pixel = {}' - .format( - task_state_to_int["ready"], - props["nside"], - props["pixel"] - ) - ) - # post processing is now done by a single rank in - # run.run_task_list - else: - self.state_set(db, name, "failed") - return failed diff --git a/deprecated/py/desispec/pipeline/tasks/starfit.py b/deprecated/py/desispec/pipeline/tasks/starfit.py deleted file mode 100644 index 616f28877..000000000 --- a/deprecated/py/desispec/pipeline/tasks/starfit.py +++ /dev/null @@ -1,205 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.starfit -=============================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -from desiutil.log import get_logger - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskStarFit(BaseTask): - """Class containing the properties of one extraction task. - """ - def __init__(self): - super(TaskStarFit, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "starfit" - self._cols = [ - "night", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","spec","expid"] - self._name_formats = ["08d","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - return [ findfile("stdstars", night=props["night"], expid=props["expid"], - groupname=None, nside=None, camera=None, band=None, - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - - # we need all the cameras for the fit of standard stars - deptasks = dict() - for band in ["b","r","z"] : - props_and_band = props.copy() - props_and_band["band"] = band - deptasks[band+"-frame"]=task_classes["extract"].name_join(props_and_band) - deptasks[band+"-fiberflat"]=task_classes["fiberflatnight"].name_join(props_and_band) - deptasks[band+"-sky"]=task_classes["sky"].name_join(props_and_band) - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 35.0 - - def _run_max_mem_proc(self, name, db): - # Per-process memory requirements - return 5.0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. 
- """ - import glob - - log = get_logger() - - opts = {} - starmodels = None - if "DESI_BASIS_TEMPLATES" in os.environ: - filenames = sorted(glob.glob(os.environ["DESI_BASIS_TEMPLATES"]+"/stdstar_templates_*.fits")) - if len(filenames) > 0 : - starmodels = filenames[-1] - else: - filenames = sorted(glob.glob(os.environ["DESI_BASIS_TEMPLATES"]+"/star_templates_*.fits")) - log.warning('Unable to find stdstar templates in {}; using star templates instead'.format( - os.getenv('DESI_BASIS_TEMPLATES'))) - if len(filenames) > 0 : - starmodels = filenames[-1] - else: - msg = 'Unable to find stdstar or star templates in {}'.format( - os.getenv('DESI_BASIS_TEMPLATES')) - log.error(msg) - raise RuntimeError(msg) - else: - log.error("DESI_BASIS_TEMPLATES not set!") - raise RuntimeError("could not find the stellar templates") - - opts["starmodels"] = starmodels - - opts["delta-color"] = 0.2 - opts["color"] = "G-R" - - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. - """ - from .base import task_classes, task_type - - - log = get_logger() - - deps = self.deps(name) - options = {} - ### options["ncpu"] = 1 - options["outfile"] = self.paths(name)[0] - options["frames"] = [] - options["skymodels"] = [] - options["fiberflats"] = [] - - # frames skymodels fiberflats - props = self.name_split(name) - for band in ["b", "r", "z"] : - props_and_band = props.copy() - props_and_band["band"] = band - - task = task_classes["extract"].name_join(props_and_band) - frame_filename = task_classes["extract"].paths(task)[0] - - task = task_classes["fiberflatnight"].name_join(props_and_band) - fiberflat_filename = task_classes["fiberflatnight"].paths(task)[0] - - task = task_classes["sky"].name_join(props_and_band) - sky_filename = task_classes["sky"].paths(task)[0] - - # check all files exist - if os.path.isfile(frame_filename) \ - and os.path.isfile(fiberflat_filename) \ - and os.path.isfile(sky_filename) : - - options["frames"].append(frame_filename) - options["skymodels"].append(sky_filename) - options["fiberflats"].append(fiberflat_filename) - - else : - log.warning("missing band {} for {}".format(band,name)) - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_fit_stdstars" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. 
- """ - from ...scripts import stdstars - optlist = self._option_list(name, opts) - args = stdstars.parse(optlist) - stdstars.main(args) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready on all fierflatnight with same night,band,spec - props = self.name_split(name) - log = get_logger() - tt="fluxcalib" - cmd = "select name from {} where night={} and expid={} and spec={} and state=0".format(tt,props["night"],props["expid"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {}".format(tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/pipeline/tasks/traceshift.py b/deprecated/py/desispec/pipeline/tasks/traceshift.py deleted file mode 100644 index 87f380b30..000000000 --- a/deprecated/py/desispec/pipeline/tasks/traceshift.py +++ /dev/null @@ -1,144 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.pipeline.tasks.traceshift -================================== - -""" - -from __future__ import absolute_import, division, print_function - -from collections import OrderedDict - -from ..defs import (task_name_sep, task_state_to_int, task_int_to_state) - -from ...util import option_list - -from ...io import findfile - -from .base import (BaseTask, task_classes) - -from desiutil.log import get_logger - -import sys,re,os,copy - -# NOTE: only one class in this file should have a name that starts with "Task". - -class TaskTraceShift(BaseTask): - """Class containing the properties of one trace shift task. - """ - def __init__(self): - super(TaskTraceShift, self).__init__() - # then put int the specifics of this class - # _cols must have a state - self._type = "traceshift" - self._cols = [ - "night", - "band", - "spec", - "expid", - "state" - ] - self._coltypes = [ - "integer", - "text", - "integer", - "integer", - "integer" - ] - # _name_fields must also be in _cols - self._name_fields = ["night","band","spec","expid"] - self._name_formats = ["08d","s","d","08d"] - - def _paths(self, name): - """See BaseTask.paths. - """ - props = self.name_split(name) - camera = "{}{}".format(props["band"], props["spec"]) - return [ findfile("psf", night=props["night"], expid=props["expid"], - camera=camera, groupname=None, nside=None, band=props["band"], - spectrograph=props["spec"]) ] - - def _deps(self, name, db, inputs): - """See BaseTask.deps. - """ - from .base import task_classes - props = self.name_split(name) - deptasks = { - "image" : task_classes["preproc"].name_join(props), - "psf" : task_classes["psfnight"].name_join(props) - } - return deptasks - - def _run_max_procs(self): - # This is a serial task. - return 1 - - def _run_time(self, name, procs, db): - # Run time on one proc on machine with scale factor == 1.0 - return 2.0 - - - def _run_defaults(self): - """See BaseTask.run_defaults. - """ - opts = {} - opts["degxx"] = 2 - opts["degxy"] = 2 - opts["degyx"] = 0 - opts["degyy"] = 0 - opts["auto"] = True - return opts - - - def _option_list(self, name, opts): - """Build the full list of options. - - This includes appending the filenames and incorporating runtime - options. 
- """ - from .base import task_classes, task_type - - deps = self.deps(name) - options = {} - options["image"] = task_classes["preproc"].paths(deps["image"])[0] - options["psf"] = task_classes["psfnight"].paths(deps["psf"])[0] - options["outpsf"] = self.paths(name)[0] - - options.update(opts) - return option_list(options) - - def _run_cli(self, name, opts, procs, db): - """See BaseTask.run_cli. - """ - entry = "desi_compute_trace_shifts" - optlist = self._option_list(name, opts) - com = "{} {}".format(entry, " ".join(optlist)) - return com - - def _run(self, name, opts, comm, db): - """See BaseTask.run. - """ - from ...scripts import trace_shifts - optlist = self._option_list(name, opts) - args = trace_shifts.parse(optlist) - if comm is None : - trace_shifts.main(args) - else : - trace_shifts.main_mpi(args, comm=comm) - return - - def postprocessing(self, db, name, cur): - """For successful runs, postprocessing on DB""" - # run getready for all extraction with same night,band,spec - props = self.name_split(name) - log = get_logger() - for tt in ["extract"] : - cmd = "select name from {} where night={} and expid={} and band='{}' and spec={} and state=0".format(tt,props["night"],props["expid"],props["band"],props["spec"]) - cur.execute(cmd) - tasks = [ x for (x,) in cur.fetchall() ] - log.debug("checking {} {}".format(tt,tasks)) - for task in tasks : - task_classes[tt].getready( db=db,name=task,cur=cur) diff --git a/deprecated/py/desispec/qa/__init__.py b/deprecated/py/desispec/qa/__init__.py deleted file mode 100644 index 0e2f61d48..000000000 --- a/deprecated/py/desispec/qa/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -""" -desispec.qa -=========== - -""" -from __future__ import absolute_import, division, print_function - -from .qa_exposure import QA_Exposure -from .qa_frame import QA_Frame -from .qa_brick import QA_Brick -from .qa_prod import QA_Prod -from .qa_night import QA_Night - -__offline_qa_version__ = '0.5.0' diff --git a/deprecated/py/desispec/qa/html.py b/deprecated/py/desispec/qa/html.py deleted file mode 100644 index efff00f98..000000000 --- a/deprecated/py/desispec/qa/html.py +++ /dev/null @@ -1,333 +0,0 @@ -""" -desispec.qa.html -================ - -Module for generating QA HTML. -""" -from __future__ import print_function, absolute_import, division - -import os -import numpy as np -import glob - -from desispec.io import meta, get_nights, get_exposures -from desispec.io.util import makepath - -def header(title): - """ - Parameters - ---------- - title : str, optional - - Returns - ------- - - """ - head = '\n' - head += '\n' - - - head += '\n' - head += '\n' - head += '\n' - head += '\n' - head += '\n' - head += '{:s}\n'.format(title) - head += '\n' - head += '\n' - head += '\n' - head += '\n' - head += '\n' - head += '\n' - head += '\n' - - # Begin the Body - head += '\n' - head += '

{:s}

\n'.format(title) - head += '
\n' - - return head - - -def finish(f, body, links=None): - """ Fill in the HTML file and end it - Parameters - ---------- - f : file - body : str - links : str, optional - """ - # Write links - if links is not None: - f.write(links) - f.write('\n') - f.write('
\n') - # Write body - f.write(body) - # Finish - end = '\n' - end += '\n' - f.write(end) - - return end - - -def init(f, title): - head = header(title) - f.write(head) - # Init links - links = '

Quick Links

\n' - links += '
    \n' - return links - - -def calib(qaprod_dir=None, specprod_dir=None): - """ Generate HTML to orgainze calib HTML - """ - # Organized HTML - html_file = meta.findfile('qa_calib_html', qaprod_dir=qaprod_dir) - html_path,_ = os.path.split(html_file) - makepath(html_file) - # Open - f = open(html_file, 'w') - init(f, 'Calibration QA') - - # Loop on Nights - nights = get_nights(sub_folder='calibnight', specprod_dir=specprod_dir) - nights.sort() - links = '' - body = '' - for night in nights: - all_png = glob.glob(html_path+'/'+night+'/qa*.png') - if len(all_png) == 0: - continue - # Find expid - expids = [] - for png in all_png: - expids.append(int(png[-12:-4])) # A bit risky - expids = np.unique(expids) - expids.sort() - f.write('

    Night -- {:s}

    \n'.format(night)) - f.write('

      \n') - for expid in expids: - # Link - f.write('
    • Exposure {:08d}
    • \n'.format(night, expid, expid)) - # Generate Exposure html - calib_exp(night, expid, qaprod_dir=qaprod_dir) - f.write('

    \n') - - # Finish - finish(f,body) - - # Return - return links, body - - -def calib_exp(night, expid, qaprod_dir=None): - """ Geneate HTML for calib exposure PNGs - Args: - night: - expid: - - Returns: - - """ - # File name - html_file = meta.findfile('qa_calib_exp_html', night=night, expid=expid, qaprod_dir=qaprod_dir) - html_path,_ = os.path.split(html_file) - f = open(html_file, 'w') - init(f, 'Calibration Exposure QA') - - # Loop on Nights - for ctype in ['flat']: - links = '' - body = '' - # - all_png = glob.glob(html_path+'/qa-{:s}-*-{:08d}.png'.format(ctype,expid)) - all_png.sort() - if len(all_png) == 0: - continue - # Type - links +='

    {:s} Calib

    \n'.format(ctype) - for png in all_png: - _,png_file = os.path.split(png) - # Image - href="{:s}".format(png_file[:-4]) - links += '
  • {:s}
  • \n'.format(href, href) - body += '
    \n'.format(href) - body += '\n'.format(png_file) - #f.write('
  • Exposure {:08d}
  • \n'.format(night, expid, expid)) - f.write('
      \n') - f.write(links) - f.write('
    \n') - f.write(body) - - # Finish - finish(f,'') - - # Return - return links, body - - -def make_exposures(qaprod_dir=None): - """ Generate HTML to organize exposure HTML - - Parameters - ---------- - - Returns - ------- - links : str - body : str - - """ - # Organized HTML - html_file = meta.findfile('qa_exposures_html', qaprod_dir=qaprod_dir) - html_path,_ = os.path.split(html_file) - f = open(html_file, 'w') - init(f, 'Exposures QA') - - # Loop on Nights - nights = get_nights(specprod_dir=qaprod_dir) # Scans for nights in QA - nights.sort() - links = '' - body = '' - for night in nights: - # HTML - f.write('

    Night -- {:s}

    \n'.format(night)) - f.write('

      \n') - # Loop on exposures - for expid in get_exposures(night, specprod_dir=qaprod_dir): - if not os.path.exists(html_path+'/'+night+'/{:08d}'.format(expid)): - continue - # Link - f.write('
    • Exposure {:08d}
    • \n'.format(night, expid, expid, expid)) - # Generate Exposure html - make_exposure(night, expid, qaprod_dir=qaprod_dir) - f.write('

    \n') - - # Finish - finish(f,body) - -def make_exposure(night, expid, qaprod_dir=None): - """ Generate HTML for exposure PNGs - - Parameters - ---------- - setup : str - cbset : str - det : int - - Returns - ------- - links : str - body : str - - """ - # File name - html_file = meta.findfile('qa_exposure_html', night=night, expid=expid, qaprod_dir=qaprod_dir) - html_path,_ = os.path.split(html_file) - f = open(html_file, 'w') - init(f, 'Exposure QA') - - links = '' - body = '' - # Loop on Nights - for ctype in ['sky', 'flux']: - # - all_png = glob.glob(html_path+'/qa-{:s}-*-{:08d}.png'.format(ctype,expid)) - all_png.sort() - if len(all_png) == 0: - continue - # Type - links += '

    {:s} Calib

    \n'.format(ctype) - for png in all_png: - _,png_file = os.path.split(png) - # Image - href="{:s}".format(png_file[:-4]) - links += '
  • {:s}
  • \n'.format(href, href) - body += '
    \n'.format(href) - body += '\n'.format(png_file) - #f.write('
  • Exposure {:08d}
  • \n'.format(night, expid, expid)) - f.write('
      \n') - f.write(links) - f.write('
    \n') - f.write(body) - - # Finish - finish(f,'') - - # Return - return links, body - - - -def toplevel(qaprod_dir=None): - """ Generate HTML to top level QA - Mainly generates the highest level HTML file - which has links to the Exposure and Calib QA. - - This also slurps any .png files in the top-level - - Parameters - ---------- - setup : str - cbset : str - det : int - - Returns - ------- - links : str - body : str - - """ - # Organized HTML - html_file = meta.findfile('qa_toplevel_html', qaprod_dir=qaprod_dir) - html_path,_ = os.path.split(html_file) - f = open(html_file, 'w') - init(f, 'Top Level QA') - - # Calib? - calib2d_file = meta.findfile('qa_calib_html', qaprod_dir=qaprod_dir) - if os.path.exists(calib2d_file): - # Truncate the path - c2d_path, fname = os.path.split(calib2d_file) - last_slash = c2d_path.rfind('/') - f.write('

    Calibration QA

    \n'.format(c2d_path[last_slash+1:]+'/'+fname)) - # Full path - #f.write('

    Calibration QA

    \n'.format(calib2d_file)) - # Exposures? - exposures_file = meta.findfile('qa_exposures_html', qaprod_dir=qaprod_dir) - if os.path.exists(exposures_file): - # Truncated path - exp_path, fname = os.path.split(exposures_file) - last_slash = exp_path.rfind('/') - f.write('

    Exposures QA

    \n'.format(exp_path[last_slash+1:]+'/'+fname)) - # Full path - #f.write('

    Exposures QA

    \n'.format(exposures_file)) - - # Existing PNGs - f.write('
    \n') - f.write('

    PNGs

    \n') - all_png = glob.glob(html_path+'/*.png') - all_png.sort() - # Type - links = '' - body = '' - for png in all_png: - _, png_file = os.path.split(png) - # Image - href="{:s}".format(png_file[:-4]) - links += '
  • {:s}
  • \n'.format(href, href) - body += '
    \n'.format(href) - body += '\n'.format(png_file) - f.write('

      \n') - f.write(links) - f.write('

    \n') - f.write(body) - - # Finish - finish(f,'') - - # Return - return diff --git a/deprecated/py/desispec/qa/qa_brick.py b/deprecated/py/desispec/qa/qa_brick.py deleted file mode 100644 index afdc22519..000000000 --- a/deprecated/py/desispec/qa/qa_brick.py +++ /dev/null @@ -1,115 +0,0 @@ -""" -desispec.qa.qa_brick -==================== - -Classes to organize and execute QA for a DESI exposure. -""" - -from __future__ import print_function, absolute_import, division - -import numpy as np - - -class QA_Brick(object): - def __init__(self, name='None', in_data=None): - """ - Class to organize and execute QA for a DESI brick - x.flavor, x.data, x.camera - - Args: - name: str, optional - in_data: dict, optional - Allows for previous data to be ingested - Notes: - Attributes: - All input args become object attributes. - """ - # Parse - self.brick_name = name - - # Initialize data - if in_data is None: - self.data = dict(name='') - else: - assert isinstance(in_data,dict) - self.data = in_data - - def init_qatype(self, qatype, param, re_init=False): - """Initialize parameters for a given qatype - qatype: str - Type of QA to be performed (e.g. REDROCK) - param: dict - Dict of parameters to guide QA - re_init: bool, (optional) - Re-initialize parameter dict - Code will always add new parameters if any exist - """ - # Fill and return if not set previously or if re_init=True - if (qatype not in self.data) or re_init: - self.data[qatype] = {} - self.data[qatype]['PARAMS'] = param - return - - # Update the new parameters only - for key in param: - if key not in self.data[qatype]['PARAMS']: - self.data[qatype]['PARAMS'][key] = param[key] - - def init_redrock(self, re_init=False): - """Initialize parameters for REDROCK output - QA method is desispec.zfind.zfind - Parameters: - ------------ - re_init: bool, (optional) - Re-initialize REDROCK parameter dict - """ - # - - # Standard FIBERFLAT input parameters - redrock_dict = dict(MAX_NFAIL=10, # Maximum number of failed redshifts - ELG_TYPES=['ssp_em_galaxy', 'ELG'], - LRG_TYPES=['LRG'], - QSO_TYPES=['QSO'], - STAR_TYPES=['spEigenStar'], - ) - # Init - self.init_qatype('REDROCK', redrock_dict, re_init=re_init) - - def run_qa(self, qatype, inputs, clobber=True): - """Run QA tests of a given type - Over-writes previous QA of this type, unless otherwise specified - qatype: str - Type of QA to be performed (e.g. SKYSUB) - inputs: tuple - Set of inputs for the tests - clobber: bool, optional [True] - Over-write previous QA - """ - #from desispec.zfind.zfind import qa_redrock - from desispec.zfind import zfind - - # Check for previous QA if clobber==False - if not clobber: - # QA previously performed? 
- if 'METRICS' in self.data[qatype]: - return - # Run - if qatype == 'REDROCK': - # Expecting: zf, brick - assert len(inputs) == 2 - # Init parameters (as necessary) - self.init_redrock() - # Run - reload(zfind) - qadict = zfind.qa_redrock(self.data[qatype]['PARAMS'], inputs[0], inputs[1]) - else: - raise ValueError('Not ready to perform {:s} QA'.format(qatype)) - # Update - self.data[qatype]['METRICS'] = qadict - - def __repr__(self): - """ - Print formatting - """ - return ('{:s}: name={:s}'.format( - self.__class__.__name__, self.brick_name)) diff --git a/deprecated/py/desispec/qa/qa_exposure.py b/deprecated/py/desispec/qa/qa_exposure.py deleted file mode 100644 index b4b4b5fa8..000000000 --- a/deprecated/py/desispec/qa/qa_exposure.py +++ /dev/null @@ -1,321 +0,0 @@ -""" -desispec.qa.qa_exposure -======================= - -Classes to organize and execute QA for a DESI exposure. -""" - -from __future__ import print_function, absolute_import, division - -import numpy as np -import os - -from astropy.table import Table, vstack - -from desiutil.log import get_logger -from desispec.io import read_params -from desispec import io as desiio -from desispec.qa.qa_frame import qaframe_from_frame -from desispec.io.qa import qafile_from_framefile -from desispec.io import load_qa_multiexp -from desispec.io import qaprod_root -from desispec.io import read_meta_frame -from desispec.io import get_files -from desispec.io import write_qa_exposure -from desispec.io import write_qa_multiexp - -# log=get_logger() -desi_params = read_params() - - -class QA_Exposure(object): - def __init__(self, expid, night, flavor=None, specprod_dir=None, in_data=None, - qaprod_dir=None, no_load=False, multi_root=None, **kwargs): - """ - Class to organize and execute QA for a DESI Exposure - - x.flavor, x.data - - Args: - expid: int -- Exposure ID - night: str -- YYYYMMDD - specprod_dir(str): Path containing the exposures/ directory to use. If the value - is None, then the value of :func:`specprod_root` is used instead. - in_data: dict, optional -- Input data - Mainly for reading from disk - no_load: bool, optional -- Do not load QA data (rare) - multi_root: str, optional - Load QA from a slurped file. - This is the root and the path is qaprod_dir - - Notes: - - Attributes: - All input args become object attributes. - """ - # Init - if not isinstance(expid, int): - raise IOError("expid must be an int at instantiation") - self.expid = expid - self.night = night - self.meta = {} - # Paths - self.specprod_dir = specprod_dir - if qaprod_dir is None: - qaprod_dir = qaprod_root(self.specprod_dir) - self.qaprod_dir = qaprod_dir - - # Load meta from frame (ideally) - frames_dict = get_files(filetype=str('frame'), night=night, - expid=expid, specprod_dir=self.specprod_dir) - if len(frames_dict) > 0: - frame_file = list(frames_dict.items())[0][1] # Any one will do - frame_meta = read_meta_frame(frame_file) - self.load_meta(frame_meta) - flavor = self.meta['FLAVOR'] # Over-rides any input value - else: - flavor = flavor - - assert flavor in desi_params['frame_types'], "Unknown flavor {} for night {} expid {}".format(flavor, night, expid) - if flavor in ['science']: - self.type = 'data' - else: - self.type = 'calib' - self.flavor = flavor - - # Internal dicts - self.data = dict(flavor=self.flavor, expid=self.expid, - night=self.night, frames={}) - - # Load? 
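
(A minimal usage sketch for this class, assuming a standard production layout on
disk; the night, exposure ID, and output filename below are hypothetical.)

    from desispec.qa.qa_exposure import QA_Exposure

    # Gather the per-camera QA for one science exposure; the flavor and other
    # metadata are read from the frame files when they are present on disk.
    qaexp = QA_Exposure(12345, '20200102')
    qaexp.fluxcalib('qa-flux-00012345.pdf')  # per-channel ZP RMS plus a summary figure
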
- if no_load: - return - - if in_data is None: - self.load_qa_data(multi_root=multi_root, **kwargs) - else: - assert isinstance(in_data,dict) - self.data = in_data - - # Others - self.qa_s2n = None - - def fluxcalib(self, outfil): - """ Perform QA on fluxcalib results for an Exposure - - Args: - outfil: str -- Filename for PDF (may automate) - - Independent results for each channel - """ - from . import qa_plots - # Init - if 'FLUXCALIB' not in self.data: - self.data['FLUXCALIB'] = {} - # Loop on channel - cameras = list(self.data['frames'].keys()) - for channel in ['b','r','z']: - # Init - if channel not in self.data['FLUXCALIB']: - self.data['FLUXCALIB'][channel] = {} - # Load - ZPval = [] - for camera in cameras: - if camera[0] == channel: - ZPval.append(self.data['frames'][camera]['FLUXCALIB']['METRICS']['ZP']) - # Measure RMS - if len(ZPval) > 0: - self.data['FLUXCALIB'][channel]['ZP_RMS'] = np.std(ZPval) - - # Figure - qa_plots.exposure_fluxcalib(outfil, self.data) - - def load_meta(self, frame_meta): - """ Load meta info from input Frame meta - Args: - frame_meta: dict of meta data from a frame file - """ - desi_params = read_params() - for key in desi_params['frame_meta']: - if key in ['CAMERA']: # Frame specific - continue - try: - self.meta[key] = frame_meta[key] - except KeyError: - print("Keyword {:s} not present! Could be a problem".format(key)) - - def load_qa_data(self, remove=False, multi_root=None): - """ Load the QA data files for a given exposure (currently yaml) - - Args: - remove: bool, optional - Remove QA frame files - multi_root: str, optional - If provided, load the QA from a slurped file (preferred because it is fast) - """ - if multi_root is None: - qafiles = desiio.get_files(filetype='qa_'+self.type, night=self.night, - expid=self.expid, - qaprod_dir=self.qaprod_dir) - # Load into frames - for camera,qadata_path in qafiles.items(): - qa_frame = desiio.load_qa_frame(qadata_path) - # Remove? - if remove: - os.remove(qadata_path) - # Test - for key in ['expid','night']: - assert getattr(qa_frame,key) == getattr(self, key) - # Save - self.data['frames'][camera] = qa_frame.qa_data - else: - # Load - mdict = load_qa_multiexp(os.path.join(self.qaprod_dir, multi_root)) - self.parse_multi_qa_dict(mdict) - - def parse_multi_qa_dict(self, mdict): - """ Deal with different packing of QA data in slurp file - - Args: - mdict: dict - Contains the QA - - Returns: - Loads up self.data['frames'] and self.data['meta'] - - """ - # Parse - for key in mdict[self.night][str(self.expid)].keys(): - # A bit kludgy - if len(key) > 2: - if key == 'meta': - self.data[key] = mdict[self.night][str(self.expid)][key].copy() - continue - # Load em - self.data['frames'][key] = mdict[self.night][str(self.expid)][key].copy() - - def build_qa_data(self, rebuild=False): - """ - Build or re-build QA data - - - Args: - rebuild: bool, optional - - :return: - Data is loaded in self.data - """ - frame_files = desiio.get_files(filetype='frame', night=self.night, - expid=self.expid, - specprod_dir=self.specprod_dir) - # Load into frames - for camera, frame_file in frame_files.items(): - if rebuild: - qafile, qatype = qafile_from_framefile(frame_file, qaprod_dir=self.qaprod_dir) - if os.path.isfile(qafile): - os.remove(qafile) - # Generate qaframe (and figures?) 
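
(A short sketch of the rebuild path implemented here, assuming frame files exist
for the exposure; the IDs are hypothetical.)

    from desispec.qa.qa_exposure import QA_Exposure

    # Skip the initial load, regenerate the per-frame QA files, then reload them.
    qaexp = QA_Exposure(12345, '20200102', no_load=True)
    qaexp.build_qa_data(rebuild=True)
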
- _ = qaframe_from_frame(frame_file, specprod_dir=self.specprod_dir, make_plots=False, - qaprod_dir=self.qaprod_dir) - # Reload - self.load_qa_data() - - def s2n_table(self): - """ - Generate a flat Table of QA S/N measurements for the Exposure - Includes all fibers of the exposure - - Args: - - Returns: - Table is held in self.qa_s2n - - """ - from desispec.qa.qalib import s2n_flux_astro - - sub_tbls = [] - # Load up - for camera in self.data['frames'].keys(): - # Sub_tbl - if 'S2N' not in self.data['frames'][camera].keys(): - continue - sub_tbl = Table() - sub_tbl['MEDIAN_SNR'] = self.data['frames'][camera]['S2N']['METRICS']['MEDIAN_SNR'] - sub_tbl['FIBER'] = np.arange(len(sub_tbl), dtype=int) - sub_tbl['CAMERA'] = camera - sub_tbl['NIGHT'] = self.night - sub_tbl['EXPID'] = self.expid - sub_tbl['CHANNEL'] = camera[0] - # Ugly S/N (Object/fiber based) - s2n_dict = self.data['frames'][camera]['S2N'] - max_o = np.max([len(otype) for otype in s2n_dict['METRICS']['OBJLIST']]) - objtype = np.array([' '*max_o]*len(sub_tbl)) - # Coeffs - coeffs = np.zeros((len(sub_tbl), len(s2n_dict['METRICS']['FITCOEFF_TGT'][0]))) - # Others - mags = np.zeros_like(sub_tbl['MEDIAN_SNR'].data) - resid = -999. * np.ones_like(sub_tbl['MEDIAN_SNR'].data) - # Fitting - #fitfunc = s2n_flux_astro()#exptime=s2n_dict['METRICS']['EXPTIME']) #r2=s2n_dict['METRICS']['r2']) - for oid, otype in enumerate(s2n_dict['METRICS']['OBJLIST']): - fibers = np.array(s2n_dict['METRICS']['{:s}_FIBERID'.format(otype)]) - if len(fibers) == 0: - continue - coeff = s2n_dict['METRICS']['FITCOEFF_TGT'][oid] - # Avoid NAN - if np.any(np.isnan(coeff)): - continue - coeffs[fibers,:] = np.outer(np.ones_like(fibers), coeff) - # Set me - objtype[fibers] = otype - mags[fibers] = np.array(s2n_dict["METRICS"]["SNR_MAG_TGT"][oid][1]) - - # Residuals - flux = 10 ** (-0.4 * (mags[fibers] - 22.5)) - fit_snr = s2n_flux_astro(flux, *coeff) * s2n_dict['METRICS']['EXPTIME']**(1/2) - resid[fibers] = (sub_tbl['MEDIAN_SNR'][fibers] - fit_snr) / fit_snr - # Sub_tbl - sub_tbl['MAGS'] = mags - sub_tbl['RESID'] = resid - sub_tbl['OBJTYPE'] = objtype - #sub_tbl['COEFFS'] = coeffs - # Save - sub_tbls.append(sub_tbl) - # Stack me - if len(sub_tbls) > 0: - qa_tbl = vstack(sub_tbls) - else: - qa_tbl = Table() - # Hold - self.qa_s2n = qa_tbl - # Add meta - if 'meta' in self.data.keys(): - self.qa_s2n.meta = self.data['meta'] - - def slurp_into_file(self, multi_root): - """ - Write the data of an Exposure object into a JSON file - - Args: - multi_root (str): root name of the slurped file - - Returns: - - """ - # Load - mdict_root = os.path.join(self.qaprod_dir, multi_root) - mdict = load_qa_multiexp(mdict_root) - # Check on night - if self.night not in mdict.keys(): - mdict[self.night] = {} - # Insert - idict = write_qa_exposure('foo', self, ret_dict=True) - mdict[self.night][str(self.expid)] = idict[self.night][self.expid] - # Write - write_qa_multiexp(mdict_root, mdict) - - def __repr__(self): - """ Print formatting - """ - return ('{:s}: night={:s}, expid={:d}, type={:s}, flavor={:s}'.format( - self.__class__.__name__, self.night, self.expid, self.type, self.flavor)) diff --git a/deprecated/py/desispec/qa/qa_frame.py b/deprecated/py/desispec/qa/qa_frame.py deleted file mode 100644 index b97aa8a0b..000000000 --- a/deprecated/py/desispec/qa/qa_frame.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -desispec.qa.qa_frame -==================== - -Classes to organize and execute QA for a DESI exposure. 
-""" - -from __future__ import print_function, absolute_import, division - -import warnings - -import numpy as np -import copy - -from desiutil.log import get_logger - -from desispec.io import read_params -from desispec import frame - -desi_params = read_params() - -# log=get_logger() - -class QA_Frame(object): - def __init__(self, inp): - """ - Class to organize and execute QA for a DESI frame - - x.flavor, x.qa_data, x.camera - - Args: - inp : Frame, Frame meta (Header), or dict - * Frame - * astropy.io.fits.Header - * dict -- Usually read from hard-drive - - Attributes: - night: str - expid: int - camera: str - - Notes: - - """ - if isinstance(inp, dict): - assert len(inp) == 1 # There must be only one night - self.night = list(inp.keys())[0] - assert len(inp[self.night]) == 1 # There must be only one exposure - self.expid = int(list(inp[self.night].keys())[0]) - assert len(inp[self.night][self.expid]) == 2 - self.flavor = inp[self.night][self.expid].pop('flavor') - self.camera = list(inp[self.night][self.expid].keys())[0] - assert self.camera[0] in ['b','r','z'] - self.qa_data = inp[self.night][self.expid][self.camera] - else: - if isinstance(inp, frame.Frame): - inp = inp.meta - # Generate from Frame and init QA data - qkeys = ['flavor', 'camera', 'expid', 'night'] - for key in qkeys: - setattr(self, key, inp[key.upper()]) # FITS header - self.qa_data = {} - - # Final test - assert self.flavor in desi_params['frame_types'] - - def init_qatype(self, qatype, param, re_init=False): - """Initialize parameters for a given qatype - qatype: str - Type of QA to be performed (e.g. SKYSUB) - param: dict - Dict of parameters to guide QA - re_init: bool, (optional) - Re-initialize parameter dict - Code will always add new parameters if any exist - """ - # Fill and return if not set previously or if re_init=True - if (qatype not in self.qa_data) or re_init: - self.qa_data[qatype] = {} - self.qa_data[qatype]['PARAMS'] = param - return - - # Update the new parameters only - for key in param: - if key not in self.qa_data[qatype]['PARAMS']: - self.qa_data[qatype]['PARAMS'][key] = param[key] - - def init_fiberflat(self, re_init=False): - """Initialize parameters for FIBERFLAT QA - QA method is desispec.fiberflat.qa_fiberflat - - Parameters: - ------------ - re_init: bool, (optional) - Re-initialize FIBERFLAT parameter dict - """ - # - assert self.flavor in ['flat'] - - # Standard FIBERFLAT input parameters - fflat_dict = dict(MAX_N_MASK=20000, # Maximum number of pixels to mask - MAX_SCALE_OFF=0.05, # Maximum offset in counts (fraction) - MAX_OFF=0.15, # Maximum offset from unity - MAX_MEAN_OFF=0.05, # Maximum offset in mean of fiberflat - MAX_RMS=0.02, # Maximum RMS in fiberflat - ) - # Init - self.init_qatype('FIBERFLAT', fflat_dict, re_init=re_init) - - def init_fluxcalib(self, re_init=False): - """ Initialize parameters for FLUXCALIB QA - Args: - re_init: bool, (optional) - Re-initialize parameter dict - - Returns: - - """ - log=get_logger() - assert self.flavor == 'science' - - # Standard FLUXCALIB input parameters - flux_dict = dict(ZP_WAVE=0., # Wavelength for ZP evaluation (camera dependent) - MAX_ZP_OFF=0.2, # Max offset in ZP for individual star - ) - - if self.camera[0] == 'b': - flux_dict['ZP_WAVE'] = 4800. # Ang - elif self.camera[0] == 'r': - flux_dict['ZP_WAVE'] = 6500. # Ang - elif self.camera[0] == 'z': - flux_dict['ZP_WAVE'] = 8250. 
# Ang - else: - log.error("Not ready for camera {}!".format(self.camera)) - - # Init - self.init_qatype('FLUXCALIB', flux_dict, re_init=re_init) - - def init_skysub(self, re_init=False): - """Initialize parameters for SkySub QA - QA method is desispec.sky.qa_skysub - - Parameters: - ------------ - re_init: bool, (optional) - Re-initialize SKYSUB parameter dict - """ - assert self.flavor == 'science' - - sky_dict = desi_params['qa']['skysub']['PARAMS'] - # Standard SKYSUB input parameters - #sky_dict = dict( - # PCHI_RESID=0.05, # P(Chi^2) limit for bad skyfiber model residuals - # PER_RESID=95., # Percentile for residual distribution - # BIN_SZ=0.1, #- Bin size for residual/sigma histogram - # ) - # Init - self.init_qatype('SKYSUB', sky_dict, re_init=re_init) - - def init_s2n(self, re_init=False): - """Initialize parameters for SkySub QA - QA method is desispec.sky.qa_skysub - - Parameters: - ------------ - re_init: bool, (optional) - Re-initialize SKYSUB parameter dict - """ - assert self.flavor == 'science' - # Parameters - s2n_dict = desi_params['qa']['skysub']['PARAMS'].copy() - # Init - self.init_qatype('S2N', s2n_dict, re_init=re_init) - - def run_qa(self, qatype, inputs, clobber=True): - """Run QA tests of a given type - Over-writes previous QA of this type, unless otherwise specified - - qatype: str - Type of QA to be performed (e.g. SKYSUB) - inputs: tuple - Set of inputs for the tests - clobber: bool, optional [True] - Over-write previous QA - - Returns: - bool - True = Calculation performed - False = Calculation not performed - """ - from desispec.sky import qa_skysub - from desispec.fiberflat import qa_fiberflat - from desispec.fluxcalibration import qa_fluxcalib - from desispec.qa.qalib import s2nfit - - # Check for previous QA if clobber==False - if (not clobber) and (qatype in self.qa_data.keys()): - # QA previously performed? 
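
(A minimal sketch of how this dispatch is typically driven, assuming a science
frame and its sky model are already in memory; the variable names are
hypothetical.)

    qaframe = QA_Frame(frame)  # frame is a desispec.frame.Frame instance
    if qaframe.run_qa('SKYSUB', (frame, skymodel)):
        metrics = qaframe.qa_data['SKYSUB']['METRICS']  # filled only on success
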
- if 'METRICS' in self.qa_data[qatype]: - return False - # Run - if qatype == 'SKYSUB': - # Expecting: frame, skymodel - assert len(inputs) == 2 - # Init parameters (as necessary) - self.init_skysub() - # Run - qadict = qa_skysub(self.qa_data[qatype]['PARAMS'], - inputs[0], inputs[1]) - elif qatype == 'FIBERFLAT': - # Expecting: frame, fiberflat - assert len(inputs) == 2 - # Init parameters (as necessary) - self.init_fiberflat() - # Run - qadict = qa_fiberflat(self.qa_data[qatype]['PARAMS'], inputs[0], inputs[1]) - elif qatype == 'FLUXCALIB': - # Expecting: frame, fluxcalib - assert len(inputs) == 2 - # Init parameters (as necessary) - self.init_fluxcalib() - # Run - qadict = qa_fluxcalib(self.qa_data[qatype]['PARAMS'], inputs[0], inputs[1]) - elif qatype == 'S2N': - # Expecting only a frame - assert len(inputs) == 1 - # Init parameters (as necessary) - self.init_s2n() - # Run - qadict,fitsnr = s2nfit(inputs[0], self.camera, self.qa_data[qatype]['PARAMS']) - else: - raise ValueError('Not ready to perform {:s} QA'.format(qatype)) - # Update - self.qa_data[qatype]['METRICS'] = qadict - # Return - return True - - def __repr__(self): - """ Print formatting - """ - return ('{:s}: night={:s}, expid={:d}, camera={:s}, flavor={:s}'.format( - self.__class__.__name__, self.night, self.expid, self.camera, self.flavor)) - - -def qaframe_from_frame(frame_file, specprod_dir=None, make_plots=False, qaprod_dir=None, - output_dir=None, clobber=True): - """ Generate a qaframe object from an input frame_file name (and night) - - Write QA to disk - Will also make plots if directed - Args: - frame_file: str - specprod_dir: str, optional - qa_dir: str, optional -- Location of QA - make_plots: bool, optional - output_dir: str, optional - - Returns: - - """ - import glob - import os - - from desispec.io import read_frame - from desispec.io import meta - from desispec.io.qa import load_qa_frame, write_qa_frame - from desispec.io.qa import qafile_from_framefile - from desispec.io.frame import search_for_framefile - from desispec.io.fiberflat import read_fiberflat - from desispec.fiberflat import apply_fiberflat - from desispec.qa import qa_plots - from desispec.io.sky import read_sky - from desispec.io.fluxcalibration import read_flux_calibration - from desispec.qa import qa_plots_ql - from desispec.calibfinder import CalibFinder - - if '/' in frame_file: # If present, assume full path is used here - pass - else: # Find the frame file in the desispec hierarchy? 
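
(A minimal sketch of calling this driver directly, assuming a per-camera frame
file from a standard production; the filename is hypothetical. A bare filename
is located via search_for_framefile, while a full path is used as-is.)

    qaframe = qaframe_from_frame('frame-b0-00012345.fits', make_plots=False)
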
- frame_file = search_for_framefile(frame_file, specprod_dir=specprod_dir) - - # Load frame meta - frame = read_frame(frame_file) - frame_meta = frame.meta - night = frame_meta['NIGHT'].strip() - camera = frame_meta['CAMERA'].strip() - expid = frame_meta['EXPID'] - spectro = int(frame_meta['CAMERA'][-1]) - - # Filename - qafile, qatype = qafile_from_framefile(frame_file, qaprod_dir=qaprod_dir, output_dir=output_dir) - if os.path.isfile(qafile) and (not clobber): - write = False - else: - write = True - qaframe = load_qa_frame(qafile, frame_meta, flavor=frame_meta['FLAVOR']) - # Flat QA - if frame_meta['FLAVOR'] in ['flat']: - fiberflat_fil = meta.findfile('fiberflat', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir) - try: # Backwards compatibility - fiberflat = read_fiberflat(fiberflat_fil) - except FileNotFoundError: - fiberflat_fil = fiberflat_fil.replace('exposures', 'calib2d') - path, basen = os.path.split(fiberflat_fil) - path,_ = os.path.split(path) - fiberflat_fil = os.path.join(path, basen) - fiberflat = read_fiberflat(fiberflat_fil) - if qaframe.run_qa('FIBERFLAT', (frame, fiberflat), clobber=clobber): - write = True - if make_plots: - # Do it - qafig = meta.findfile('qa_flat_fig', night=night, camera=camera, expid=expid, - qaprod_dir=qaprod_dir, specprod_dir=specprod_dir, outdir=output_dir) - if (not os.path.isfile(qafig)) or clobber: - qa_plots.frame_fiberflat(qafig, qaframe, frame, fiberflat) - # SkySub QA - if qatype == 'qa_data': - sky_fil = meta.findfile('sky', night=night, camera=camera, expid=expid, specprod_dir=specprod_dir) - - try: # For backwards compatability - calib = CalibFinder([frame_meta]) - except KeyError: - fiberflat_fil = meta.findfile('fiberflatnight', night=night, camera=camera, specprod_dir=specprod_dir) - else: - fiberflat_fil = os.path.join(os.getenv('DESI_SPECTRO_CALIB'), calib.data['FIBERFLAT']) - if not os.path.exists(fiberflat_fil): - # Backwards compatibility (for now) - dummy_fiberflat_fil = meta.findfile('fiberflat', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir) # This is dummy - path = os.path.dirname(os.path.dirname(dummy_fiberflat_fil)) - fiberflat_files = glob.glob(os.path.join(path,'*','fiberflat-'+camera+'*.fits*')) - if len(fiberflat_files) == 0: - path = path.replace('exposures', 'calib2d') - path,_ = os.path.split(path) # Remove night - fiberflat_files = glob.glob(os.path.join(path,'fiberflat-'+camera+'*.fits*')) - - # Sort and take the first (same as old pipeline) - fiberflat_files.sort() - fiberflat_fil = fiberflat_files[0] - - # Load sky model and run - try: - skymodel = read_sky(sky_fil) - except FileNotFoundError: - warnings.warn("Sky file {:s} not found. 
Skipping..".format(sky_fil)) - else: - # Load if skymodel found - fiberflat = read_fiberflat(fiberflat_fil) - apply_fiberflat(frame, fiberflat) - # - if qaframe.run_qa('SKYSUB', (frame, skymodel), clobber=clobber): - write=True - if make_plots: - qafig = meta.findfile('qa_sky_fig', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir, outdir=output_dir, qaprod_dir=qaprod_dir) - qafig2 = meta.findfile('qa_skychi_fig', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir, outdir=output_dir, qaprod_dir=qaprod_dir) - if (not os.path.isfile(qafig)) or clobber: - qa_plots.frame_skyres(qafig, frame, skymodel, qaframe) - #qa_plots.frame_skychi(qafig2, frame, skymodel, qaframe) - - # S/N QA on cframe - if qatype == 'qa_data': - # cframe - cframe_file = frame_file.replace('frame-', 'cframe-') - try: - cframe = read_frame(cframe_file) - except FileNotFoundError: - warnings.warn("cframe file {:s} not found. Skipping..".format(cframe_file)) - else: - if qaframe.run_qa('S2N', (cframe,), clobber=clobber): - write=True - # Figure? - if make_plots: - s2n_dict = copy.deepcopy(qaframe.qa_data['S2N']) - qafig = meta.findfile('qa_s2n_fig', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir, outdir=output_dir, qaprod_dir=qaprod_dir) - # Add an item or two for the QL method - s2n_dict['CAMERA'] = camera - s2n_dict['EXPID'] = expid - s2n_dict['PANAME'] = 's2nfit' - s2n_dict['METRICS']['RA'] = frame.fibermap['TARGET_RA'].data - s2n_dict['METRICS']['DEC'] = frame.fibermap['TARGET_DEC'].data - # Deal with YAML list instead of ndarray - s2n_dict['METRICS']['MEDIAN_SNR'] = np.array(s2n_dict['METRICS']['MEDIAN_SNR']) - # Generate - if (not os.path.isfile(qafig)) or clobber: - qa_plots.frame_s2n(s2n_dict, qafig) - - # FluxCalib QA - if qatype == 'qa_data': - # Standard stars - stdstar_fil = meta.findfile('stdstars', night=night, camera=camera, expid=expid, specprod_dir=specprod_dir, - spectrograph=spectro) - # try: - # model_tuple=read_stdstar_models(stdstar_fil) - # except FileNotFoundError: - # warnings.warn("Standard star file {:s} not found. Skipping..".format(stdstar_fil)) - # else: - flux_fil = meta.findfile('fluxcalib', night=night, camera=camera, expid=expid, specprod_dir=specprod_dir) - try: - fluxcalib = read_flux_calibration(flux_fil) - except FileNotFoundError: - warnings.warn("Flux file {:s} not found. Skipping..".format(flux_fil)) - else: - if qaframe.run_qa('FLUXCALIB', (frame, fluxcalib), clobber=clobber): # , model_tuple))#, indiv_stars)) - write = True - if make_plots: - qafig = meta.findfile('qa_flux_fig', night=night, camera=camera, expid=expid, - specprod_dir=specprod_dir, outdir=output_dir, qaprod_dir=qaprod_dir) - if (not os.path.isfile(qafig)) or clobber: - qa_plots.frame_fluxcalib(qafig, qaframe, frame, fluxcalib) # , model_tuple) - # Write - if write: - write_qa_frame(qafile, qaframe, verbose=True) - return qaframe diff --git a/deprecated/py/desispec/qa/qa_multiexp.py b/deprecated/py/desispec/qa/qa_multiexp.py deleted file mode 100644 index d1d119571..000000000 --- a/deprecated/py/desispec/qa/qa_multiexp.py +++ /dev/null @@ -1,270 +0,0 @@ -""" -desispec.qa.qa_multiexp -======================= - -Class to organize QA for multiple exposures. -Likely to only be used as parent of QA_Night or QA_Prod. 
-""" - -from __future__ import print_function, absolute_import, division - -import numpy as np -import glob, os -import warnings - -from desispec.io import specprod_root -from desispec.io import write_qa_exposure -from desispec.io import write_qa_multiexp -from desispec.io import qaprod_root - -from desispec.qa import qa_exposure - -from desiutil.log import get_logger - -# log = get_logger() - - -class QA_MultiExp(object): - def __init__(self, specprod_dir=None, qaprod_dir=None): - """ Class to organize and execute QA for a DESI production - - Args: - specprod_dir(str): Path containing the exposures/ directory to use. If the value - is None, then the value of :func:`specprod_root` is used instead. - qaprod_dir(str): Path containing the root path for QA output - Notes: - - Attributes: - qa_exps : list - List of QA_Exposure classes, one per exposure in production - data : dict - """ - # Init - if specprod_dir is None: - specprod_dir = specprod_root() - if qaprod_dir is None: - qaprod_dir = qaprod_root() - # - self.specprod_dir = specprod_dir - self.qaprod_dir = qaprod_dir - tmp = specprod_dir.split('/') - self.prod_name = tmp[-1] if (len(tmp[-1]) > 0) else tmp[-2] - # Exposure dict stored as [night][exposure] - self.mexp_dict = {} - # QA Exposure objects - self.qa_exps = [] - # dict to hold QA data - # Data Model : key1 = Night(s); key2 = Expids - self.data = {} - # - self.qaexp_outroot = None - - def build_data(self): - """ Build QA data dict - """ - from desiutil.io import combine_dicts - # Loop on exposures - odict = {} - for qaexp in self.qa_exps: - # Get the exposure dict - idict = write_qa_exposure('foo', qaexp, ret_dict=True) - odict = combine_dicts(odict, idict) - # Finish - self.data = odict - - def load_exposure_s2n(self, nights='all', redo=False): - """ - Generate a series of QA_Exposure objects from self.data - and then load up the S/N tables in the QA_Exposure objects - - Args: - nights: str, optional - redo: bool, optional - - Returns: - self.qa_exps holds QA_Exposure objects - - """ - # Already loaded? 
Should check for the table - if (len(self.qa_exps) > 0) and (not redo): - return - # Nights - for night in self.data: - if (night not in nights) and (nights != 'all'): - continue - # Exposures - for expid in self.data[night]: - # Cameras - if self.data[night][expid]['flavor'] != 'science': - continue - # Instantiate - qaexp = qa_exposure.QA_Exposure(int(expid), night, 'science', no_load=True, - qaprod_dir=self.qaprod_dir, specprod_dir=self.specprod_dir) - qaexp.parse_multi_qa_dict(self.data) - qaexp.s2n_table() - # Append - self.qa_exps.append(qaexp) - - def get_qa_table(self, qatype, metric, nights='all', channels='all'): - """ Generate a table of QA values for a specific QA METRIC - - Args: - qatype: str - FIBERFLAT, SKYSUB - metric: str - nights: str or list of str, optional - channels: str or list of str, optional - 'b', 'r', 'z' - - Returns: - qa_tbl: Table - Will be empty if none of the QA matches - """ - from astropy.table import Table - out_list = [] - out_expid = [] - out_expmeta = [] - out_cameras = [] - # Nights - for night in self.data: - if (night not in nights) and (nights != 'all'): - continue - # Exposures - for expid in self.data[night]: - # Cameras - exp_meta = self.data[night][expid]['meta'] - for camera in self.data[night][expid]: - if camera in ['flavor', 'meta']: - continue - if (camera[0] not in channels) and (channels != 'all'): - continue - # Grab - try: - val = self.data[night][expid][camera][qatype]['METRICS'][metric] - except KeyError: # Each exposure has limited qatype - pass - else: - if isinstance(val, (list,tuple)): - out_list.append(val[0]) - else: - out_list.append(val) - # Meta data - out_expid.append(expid) - out_cameras.append(camera) - out_expmeta.append(exp_meta) - # Return Table - qa_tbl = Table() - if len(out_expmeta) == 0: # Empty? - return qa_tbl - qa_tbl[metric] = out_list - qa_tbl['EXPID'] = out_expid - qa_tbl['CAMERA'] = out_cameras - # Add expmeta (includes DATE-OBS) - for key in out_expmeta[0].keys(): - tmp_list = [] - for exp_meta in out_expmeta: - tmp_list.append(exp_meta[key]) - qa_tbl[key] = tmp_list - return qa_tbl - - def load_data(self, inroot=None): - """ Load QA data from disk - """ - from desispec.io import load_qa_multiexp - # Init - if inroot is None: - inroot = self.qaexp_outroot - # Load - self.data = load_qa_multiexp(inroot) - - def make_frameqa(self, make_plots=False, clobber=False, restrict_nights=None): - """ Work through the exposures and make QA for all frames - - Parameters: - make_plots: bool, optional - Remake the plots too? - clobber: bool, optional - restrict_nights: list, optional - Only perform QA on the input list of nights - Returns: - - """ - # imports - from desispec.qa.qa_frame import qaframe_from_frame - from desispec.io.qa import qafile_from_framefile - - # Loop on nights - for night in self.mexp_dict.keys(): - if restrict_nights is not None: - if night not in restrict_nights: - continue - for exposure in self.mexp_dict[night]: - # Object only?? 
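# get_qa_table() above flattens the nested QA dictionary -- keyed as
# data[night][expid][camera][qatype]['METRICS'][metric] -- into one row per
# camera and exposure, skipping the 'flavor'/'meta' bookkeeping keys and any
# camera that lacks the requested metric. A self-contained sketch of that walk
# over toy data (the night, expid and metric values here are made up):

data = {
    '20241016': {
        '00012345': {
            'flavor': 'science',
            'meta': {'DATE-OBS': '2024-10-17T03:14:15'},
            'b0': {'SKYSUB': {'METRICS': {'MED_RESID': 0.02}}},
            'r0': {'SKYSUB': {'METRICS': {'MED_RESID': -0.01}}},
        },
    },
}

rows = []
for night, exposures in data.items():
    for expid, cameras in exposures.items():
        for camera, qa in cameras.items():
            if camera in ('flavor', 'meta'):      # bookkeeping entries, not cameras
                continue
            try:
                val = qa['SKYSUB']['METRICS']['MED_RESID']
            except KeyError:                      # this camera lacks the metric
                continue
            rows.append((night, expid, camera, val))

print(rows)  # [('20241016', '00012345', 'b0', 0.02), ('20241016', '00012345', 'r0', -0.01)]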
- for camera,frame_fil in self.mexp_dict[night][exposure].items(): - # QA filename - qafile, _ = qafile_from_framefile(frame_fil, qaprod_dir=self.qaprod_dir) - if os.path.isfile(qafile) and (not clobber) and (not make_plots): - continue - # Make QA - qaframe_from_frame(frame_fil, make_plots=make_plots, qaprod_dir=self.qaprod_dir, - clobber=clobber, specprod_dir=self.specprod_dir) - - def slurp(self, make_frameqa=False, remove=True, **kwargs): - """ Slurp all the individual QA files to generate - a list of QA_Exposure objects - - Args: - make_frameqa: bool, optional - Regenerate the individual QA files (at the frame level first) - remove: bool, optional - Remove the individual QA files? - - Returns: - - """ - from desispec.qa import QA_Exposure - log = get_logger() - # Remake? - if make_frameqa: - self.make_frameqa(**kwargs) - # Loop on nights - # Reset - log.info("Resetting QA_Exposure objects") - self.qa_exps = [] - # Loop - for night in self.mexp_dict.keys(): - # Loop on exposures - for exposure in self.mexp_dict[night].keys(): - frames_dict = self.mexp_dict[night][exposure] - if len(frames_dict) == 0: - continue - # Load any frame (for the type) - qa_exp = QA_Exposure(exposure, night, qaprod_dir=self.qaprod_dir, - specprod_dir=self.specprod_dir, remove=remove) - # Append - self.qa_exps.append(qa_exp) - - def write_qa_exposures(self, outroot=None, skip_rebuild=False, **kwargs): - """ Write the slurp of QA Exposures to the hard drive - Args: - outroot: str - skip_rebuild : bool, optional - Do not rebuild the data dict - **kwargs: - - Returns: - output_file : str - - """ - if outroot is None: - outroot = self.qaexp_outroot - # Rebuild? - if not skip_rebuild: - self.build_data() - # Do it - return write_qa_multiexp(outroot, self.data, **kwargs) - - def __repr__(self): - """ Print formatting - """ - return ('{:s}: specprod_dir={:s}'.format(self.__class__.__name__, self.specprod_dir)) diff --git a/deprecated/py/desispec/qa/qa_night.py b/deprecated/py/desispec/qa/qa_night.py deleted file mode 100644 index ab39f5c51..000000000 --- a/deprecated/py/desispec/qa/qa_night.py +++ /dev/null @@ -1,60 +0,0 @@ -""" -desispec.qa.qa_night -==================== - -Class to organize QA for one night of DESI exposures. -""" - -from __future__ import print_function, absolute_import, division - -import numpy as np -import glob, os -import warnings - -from desispec.io import get_exposures -from desispec.io import get_files -from desispec.io import read_meta_frame -from desispec.io import get_nights -from .qa_multiexp import QA_MultiExp - -from desiutil.log import get_logger - -# log = get_logger() - - -class QA_Night(QA_MultiExp): - def __init__(self, night, **kwargs): - """ Class to organize and execute QA for a DESI production - - Args: - specprod_dir(str): Path containing the exposures/ directory to use. If the value - is None, then the value of :func:`specprod_root` is used instead. 
- Notes: - **kwargs are passed to QA_MultiExp - - Attributes: - qa_exps : list - List of QA_Exposure classes, one per exposure in production - data : dict - """ - # Init - self.night = night - # Instantiate - QA_MultiExp.__init__(self, **kwargs) - # Load up exposures for the full production - nights = get_nights(specprod_dir=self.specprod_dir) - # Check the night exists - if self.night not in nights: - raise IOError("Night {} not in known nights in {}".format( - self.night, self.specprod_dir)) - # Load up - self.mexp_dict[self.night] = {} - for exposure in get_exposures(self.night, specprod_dir = self.specprod_dir): - # Object only?? - frames_dict = get_files(filetype = str('frame'), night = self.night, - expid = exposure, specprod_dir = self.specprod_dir) - self.mexp_dict[self.night][exposure] = frames_dict - # Output file names - self.qaexp_outroot = self.qaprod_dir+'/'+self.night+'_qa' - - diff --git a/deprecated/py/desispec/qa/qa_plots.py b/deprecated/py/desispec/qa/qa_plots.py deleted file mode 100644 index fb5dbf80a..000000000 --- a/deprecated/py/desispec/qa/qa_plots.py +++ /dev/null @@ -1,1584 +0,0 @@ -""" -desispec.qa.qa_plots -==================== - -Module for QA plots. -""" -from __future__ import print_function, absolute_import, division - -import os -import numpy as np -from scipy import signal -import scipy -import scipy.stats -import pdb -import copy - -from astropy.time import Time - -from desiutil.log import get_logger -from desispec import fluxcalibration as dsflux -from desispec.util import set_backend -set_backend() - -import matplotlib -from matplotlib import pyplot as plt -import matplotlib.gridspec as gridspec - -from desispec import util -from desispec.io import makepath -from desispec.fluxcalibration import isStdStar - -from desiutil import plots as desiu_p - -from desispec.io import read_params -desi_params = read_params() - -from desispec.qa.qalib import s2n_funcs - -from desiutil.log import get_logger - -def brick_redrock(outfil, zf, qabrick): - """ QA plots for redrock fits - - Args: - outfil: - qabrick: - zf: ZfindBase object - - Returns: - Stuff? - """ - sty_otype = get_sty_otype() - # Convert types (this should become obsolete) - param = qabrick.data['REDROCK']['PARAMS'] - zftypes = [] - for ztype in zf.spectype: - if ztype in param['ELG_TYPES']: - zftypes.append('ELG') - elif ztype in param['QSO_TYPES']: - zftypes.append('QSO') - elif ztype in param['STAR_TYPES']: - zftypes.append('STAR') - else: - zftypes.append('UNKNWN') - zftypes = np.array(zftypes) - - # Plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,2) - - # Error vs. 
z - ax0 = plt.subplot(gs[0,0]) - - # - ax0.set_ylabel(r'$\delta z / (1+z)$') - ax0.set_ylim(0.0, 0.002) - ax0.set_xlabel('z') - - for key in sty_otype: - idx = np.where(zftypes == key)[0] - if len(idx) == 0: - continue - ax0.scatter(zf.z[idx], zf.zerr[idx]/(1+zf.z[idx]), marker='o', - color=sty_otype[key]['color'], label=sty_otype[key]['lbl']) - - # Legend - legend = ax0.legend(loc='upper left', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Meta text - ax2 = plt.subplot(gs[1,1]) - ax2.set_axis_off() - show_meta(ax2, qabrick, 'REDROCK', outfil) - - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - plt.savefig(outfil) - plt.close() - print('Wrote QA REDROCK file: {:s}'.format(outfil)) - - - -def frame_skyres(outfil, frame, skymodel, qaframe, quick_look=False): - """ - Generate QA plots and files for sky residuals of a given frame - - Parameters - ---------- - outfil: str - Name of output file - frame: Frame object - skymodel: SkyModel object - qaframe: QAFrame object - """ - from desispec.sky import subtract_sky - log = get_logger() - - # Access metrics - ''' - wavg_ivar = np.sum(res_ivar,0) - chi2_wavg = np.sum(wavg_res**2 * wavg_ivar) - dof_wavg = np.sum(wavg_ivar > 0.) - pchi2_wavg = scipy.stats.distributions.chi2.sf(chi2_wavg, dof_wavg) - chi2_med = np.sum(med_res**2 * wavg_ivar) - pchi2_med = scipy.stats.distributions.chi2.sf(chi2_med, dof_wavg) - ''' - skyfibers = np.array(qaframe.qa_data['SKYSUB']["METRICS"]["SKYFIBERID"]) - subtract_sky(frame, skymodel) - res=frame.flux[skyfibers] - res_ivar=frame.ivar[skyfibers] - if quick_look: - med_res = qaframe.qa_data['SKYSUB']["METRICS"]["MED_RESID_WAVE"] - wavg_res = qaframe.qa_data['SKYSUB']["METRICS"]["WAVG_RES_WAVE"] - else: - med_res = np.median(res,axis=0) - wavg_res = np.sum(res*res_ivar,0) / (np.sum(res_ivar,0) + (np.sum(res_ivar,0)==0)) - - # Plot - if quick_look: - fig = plt.figure(figsize=(8, 10.0)) - gs = gridspec.GridSpec(4,2) - else: - fig = plt.figure(figsize=(8, 6.0)) - gs = gridspec.GridSpec(2,2) - xmin,xmax = np.min(frame.wave), np.max(frame.wave) - - # Simple residual plot - ax0 = plt.subplot(gs[0,:]) - ax0.plot(frame.wave, med_res, label='Median Res') - ax0.plot(frame.wave, signal.medfilt(med_res,51), color='black', label='Median**2 Res') - ax0.plot(frame.wave, signal.medfilt(wavg_res,51), color='red', label='Med WAvgRes') - - # - ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - ax0.set_xlabel('Wavelength') - ax0.set_ylabel('Sky Residuals (Counts)') - ax0.set_xlim(xmin,xmax) - ax0.set_xlabel('Wavelength') - ax0.set_ylabel('Sky Residuals (Counts)') - ax0.set_xlim(xmin,xmax) - med0 = np.maximum(np.abs(np.median(med_res)), 1.) - ax0.set_ylim(-5.*med0, 5.*med0) - #ax0.text(0.5, 0.85, 'Sky Meanspec', - # transform=ax_flux.transAxes, ha='center') - - # Legend - legend = ax0.legend(loc='upper right', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Histogram of all residuals - ax1 = plt.subplot(gs[1,0]) - xmin,xmax = -5., 5. - - # Histogram - binsz = qaframe.qa_data['SKYSUB']["PARAMS"]["BIN_SZ"] - if 'DEVS_1D' in qaframe.qa_data['SKYSUB']["METRICS"].keys(): # Online - hist = np.asarray(qaframe.qa_data['SKYSUB']["METRICS"]["DEVS_1D"]) - edges = np.asarray(qaframe.qa_data['SKYSUB']["METRICS"]["DEVS_EDGES"]) - else: # Generate for offline - gd_res = res_ivar > 0. 
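# The wavg_res line above uses a common numpy guard: adding (sum_ivar == 0) to the
# denominator turns fully masked wavelength columns into 0/1 = 0 instead of 0/0 = nan.
# A self-contained illustration of that idiom, and of the "pull" values
# res * sqrt(ivar) that feed the histogram built just below (toy arrays):

import numpy as np

res = np.array([[1.0, 2.0], [3.0, 0.0]])    # residuals, shape (nfiber, nwave)
ivar = np.array([[1.0, 0.0], [1.0, 0.0]])   # second wavelength column fully masked

sum_ivar = np.sum(ivar, axis=0)
wavg_res = np.sum(res * ivar, axis=0) / (sum_ivar + (sum_ivar == 0))
print(wavg_res)                             # [2. 0.] -- no nan in the masked column

good = ivar > 0
pulls = res[good] * np.sqrt(ivar[good])     # residuals in units of sigma
print(pulls)                                # [1. 3.] -- ~N(0,1) if the errors are right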
-        if not np.any(gd_res):
-            log.info("No good residuals in frame_skyres plot")
-            edges = None
-        else:
-            devs = res[gd_res] * np.sqrt(res_ivar[gd_res])
-            min_devs = np.maximum(np.min(devs), xmin*2)
-            max_devs = np.minimum(np.max(devs), xmax*2)
-            i0, i1 = int(min_devs/binsz) - 1, int(max_devs/binsz) + 1
-            rng = tuple( binsz*np.array([i0,i1]) )
-            nbin = i1-i0
-            hist, edges = np.histogram(devs, range=rng, bins=nbin)
-
-    if edges is not None:
-        xhist = (edges[1:] + edges[:-1])/2.
-        ax1.hist(xhist, color='blue', bins=edges, weights=hist)#, histtype='step')
-        # PDF for Gaussian
-        area = binsz * np.sum(hist)
-        xppf = np.linspace(scipy.stats.norm.ppf(0.0001), scipy.stats.norm.ppf(0.9999), 100)
-        ax1.plot(xppf, area*scipy.stats.norm.pdf(xppf), 'r-', alpha=1.0)
-
-    ax1.set_xlabel(r'Res/$\sigma$')
-    ax1.set_ylabel('N')
-    ax1.set_xlim(xmin,xmax)
-
-    # Meta text
-    #- limit the dictionary to residuals only for meta
-    qaresid=copy.deepcopy(qaframe)
-    resid_keys=['NREJ','NSKY_FIB','NBAD_PCHI','MED_RESID','RESID_PER']
-    qaresid.qa_data['SKYSUB']['METRICS']={key:value for key,value in qaframe.qa_data['SKYSUB']
-                                          ['METRICS'].items() if key in resid_keys}
-
-    ax2 = plt.subplot(gs[1,1])
-    ax2.set_axis_off()
-    show_meta(ax2, qaresid, 'SKYSUB', outfil)
-
-    if quick_look:
-        #- SNR Plot
-        elg_snr_mag = qaframe.qa_data['SKYSUB']["METRICS"]["ELG_SNR_MAG"]
-        lrg_snr_mag = qaframe.qa_data['SKYSUB']["METRICS"]["LRG_SNR_MAG"]
-        qso_snr_mag = qaframe.qa_data['SKYSUB']["METRICS"]["QSO_SNR_MAG"]
-        star_snr_mag = qaframe.qa_data['SKYSUB']["METRICS"]["STAR_SNR_MAG"]
-
-        ax3 = plt.subplot(gs[2,0])
-        ax4 = plt.subplot(gs[2,1])
-        ax5 = plt.subplot(gs[3,0])
-        ax6 = plt.subplot(gs[3,1])
-
-        ax3.set_ylabel(r'Median S/N')
-        ax3.set_xlabel('')
-        ax3.set_title(r'ELG')
-        if len(elg_snr_mag[1]) > 0: #- at least 1 elg fiber?
-            select=np.where((elg_snr_mag[1] != np.array(None)) & (~np.isnan(elg_snr_mag[1])) & (np.abs(elg_snr_mag[1])!=np.inf))[0] #- Remove None, nan and inf values in mag
-            if select.shape[0]>0:
-
-                xmin=np.min(elg_snr_mag[1][select])-0.1
-                xmax=np.max(elg_snr_mag[1][select])+0.1
-                ax3.set_xlim(xmin,xmax)
-                ax3.set_ylim(np.min(elg_snr_mag[0][select])-0.1,np.max(elg_snr_mag[0][select])+0.1)
-                ax3.xaxis.set_ticks(np.arange(int(np.min(elg_snr_mag[1][select])),int(np.max(elg_snr_mag[1][select]))+1,0.5))
-                ax3.tick_params(axis='x',labelsize=10,labelbottom='on')
-                ax3.tick_params(axis='y',labelsize=10,labelleft='on')
-                ax3.plot(elg_snr_mag[1][select],elg_snr_mag[0][select],'b.')
-
-        ax4.set_ylabel('')
-        ax4.set_xlabel('')
-        ax4.set_title(r'LRG')
-        if len(lrg_snr_mag[1]) > 0: #- at least 1 lrg fiber?
-            select=np.where((lrg_snr_mag[1] != np.array(None)) & (~np.isnan(lrg_snr_mag[1])) & (np.abs(lrg_snr_mag[1])!=np.inf))[0]
-            if select.shape[0]>0:
-                xmin=np.min(lrg_snr_mag[1][select])-0.1
-                xmax=np.max(lrg_snr_mag[1][select])+0.1
-                ax4.set_xlim(xmin,xmax)
-                ax4.set_ylim(np.min(lrg_snr_mag[0][select])-0.1,np.max(lrg_snr_mag[0][select])+0.1)
-                ax4.xaxis.set_ticks(np.arange(int(np.min(lrg_snr_mag[1][select])),int(np.max(lrg_snr_mag[1][select]))+1,0.5))
-                ax4.tick_params(axis='x',labelsize=10,labelbottom='on')
-                ax4.tick_params(axis='y',labelsize=10,labelleft='on')
-                ax4.plot(lrg_snr_mag[1][select],lrg_snr_mag[0][select],'r.')
-
-        ax5.set_ylabel(r'Median S/N')
-        ax5.set_xlabel(r'Mag. (DECAM_R)')
-        ax5.set_title(r'QSO')
-        if len(qso_snr_mag[1]) > 0: #- at least 1 qso fiber?
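# Each S/N panel in this block filters its magnitude array the same way: the
# select lines drop entries that are None, NaN, or +/-inf before axis limits are
# computed. A compact equivalent of that (x != np.array(None)) & ~isnan & !=inf
# chain, shown on a toy object array:

import numpy as np

mag = np.array([22.1, None, np.nan, np.inf, 21.7], dtype=object)
snr = np.array([1.5, 0.0, 0.0, 0.0, 2.2])

mag_f = np.array([np.nan if m is None else float(m) for m in mag])
keep = np.isfinite(mag_f)          # False for None, nan, and +/-inf alike
print(mag_f[keep], snr[keep])      # [22.1 21.7] [1.5 2.2]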
-            select=np.where((qso_snr_mag[1] != np.array(None)) & (~np.isnan(qso_snr_mag[1])) & (np.abs(qso_snr_mag[1])!=np.inf))[0] #- Remove None, nan and inf values
-            if select.shape[0]>0:
-
-                xmin=np.min(qso_snr_mag[1][select])-0.1
-                xmax=np.max(qso_snr_mag[1][select])+0.1
-                ax5.set_xlim(xmin,xmax)
-                ax5.set_ylim(np.min(qso_snr_mag[0][select])-0.1,np.max(qso_snr_mag[0][select])+0.1)
-                ax5.xaxis.set_ticks(np.arange(int(np.min(qso_snr_mag[1][select])),int(np.max(qso_snr_mag[1][select]))+1,1.0))
-                ax5.tick_params(axis='x',labelsize=10,labelbottom='on')
-                ax5.tick_params(axis='y',labelsize=10,labelleft='on')
-                ax5.plot(qso_snr_mag[1][select],qso_snr_mag[0][select],'g.')
-
-        ax6.set_ylabel('')
-        ax6.set_xlabel('Mag. (DECAM_R)')
-        ax6.set_title(r'STD')
-        if len(star_snr_mag[1]) > 0: #- at least 1 std fiber?
-            select=np.where((star_snr_mag[1] != np.array(None)) & (~np.isnan(star_snr_mag[1])) & (np.abs(star_snr_mag[1])!=np.inf))[0]
-            if select.shape[0]>0:
-                xmin=np.min(star_snr_mag[1][select])-0.1
-                xmax=np.max(star_snr_mag[1][select])+0.1
-                ax6.set_xlim(xmin,xmax)
-                ax6.set_ylim(np.min(star_snr_mag[0][select])-0.1,np.max(star_snr_mag[0][select])+0.1)
-                ax6.xaxis.set_ticks(np.arange(int(np.min(star_snr_mag[1][select])),int(np.max(star_snr_mag[1][select]))+1,0.5))
-                ax6.tick_params(axis='x',labelsize=10,labelbottom='on')
-                ax6.tick_params(axis='y',labelsize=10,labelleft='on')
-                ax6.plot(star_snr_mag[1][select],star_snr_mag[0][select],'k.')
-
-        """
-        # Meta
-        xlbl = 0.1
-        ylbl = 0.85
-        i0 = outfil.rfind('/')
-        ax2.text(xlbl, ylbl, outfil[i0+1:], color='black', transform=ax2.transAxes, ha='left')
-        yoff=0.15
-        for key in sorted(qaframe.data['SKYSUB']['METRICS'].keys()):
-            if key in ['QA_FIG']:
-                continue
-            # Show
-            ylbl -= yoff
-            ax2.text(xlbl+0.1, ylbl, key+': '+str(qaframe.data['SKYSUB']['METRICS'][key]),
-                     transform=ax2.transAxes, ha='left', fontsize='small')
-        """
-
-
-    '''
-    # Residuals
-    scatt_sz = 0.5
-    ax_res = plt.subplot(gs[1])
-    ax_res.get_xaxis().set_ticks([]) # Suppress labeling
-    res = (sky_model - (true_flux*scl))/(true_flux*scl)
-    rms = np.sqrt(np.sum(res**2)/len(res))
-    #ax_res.set_ylim(-3.*rms, 3.*rms)
-    ax_res.set_ylim(-2, 2)
-    ax_res.set_ylabel('Frac Res')
-    # Error
-    #ax_res.plot(true_wave, 2.*ms_sig/sky_model, color='red')
-    ax_res.scatter(wave,res, marker='o',s=scatt_sz)
-    ax_res.plot([xmin,xmax], [0.,0], 'g-')
-    ax_res.set_xlim(xmin,xmax)
-
-    # Relative to error
-    ax_sig = plt.subplot(gs[2])
-    ax_sig.set_xlabel('Wavelength')
-    sig_res = (sky_model - (true_flux*scl))/sky_sig
-    ax_sig.scatter(wave, sig_res, marker='o',s=scatt_sz)
-    ax_sig.set_ylabel(r'Res $\delta/\sigma$')
-    ax_sig.set_ylim(-5., 5.)
- ax_sig.plot([xmin,xmax], [0.,0], 'g-') - ax_sig.set_xlim(xmin,xmax) - ''' - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - outfile = makepath(outfil) - plt.savefig(outfil) - plt.close() - print('Wrote QA SkyRes file: {:s}'.format(outfil)) - -def frame_fluxcalib(outfil, qaframe, frame, fluxcalib): - """ QA plots for Flux calibration in a Frame - - Args: - outfil: str, name of output file - qaframe: dict containing QA info - frame: frame object containing extraction of standard stars - fluxcalib: fluxcalib object containing flux calibration - - Returns: - """ - log = get_logger() - - # Standard stars - exptime = frame.meta['EXPTIME'] - stdfibers = np.where(isStdStar(frame.fibermap))[0] - stdstars = frame[stdfibers] - #nstds = np.sum(stdfibers) - nstds = len(stdfibers) - - # Median spectrum - medcalib = np.median(fluxcalib.calib[stdfibers],axis=0) - ZP_AB = dsflux.ZP_from_calib(exptime, fluxcalib.wave, medcalib) - - - # Plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,2) - - xmin,xmax = np.min(fluxcalib.wave), np.max(fluxcalib.wave) - - # Simple residual plot - ax0 = plt.subplot(gs[0,:]) - - # - #ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - #ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - ax0.set_ylabel('ZP_AB') - ax0.set_xlim(xmin, xmax) - ax0.set_xlabel('Wavelength') - #med0 = np.maximum(np.abs(np.median(med_res)), 1.) - #ax0.set_ylim(-5.*med0, 5.*med0) - #ax0.text(0.5, 0.85, 'Sky Meanspec', - # transform=ax_flux.transAxes, ha='center') - - # Other stars - for ii in range(nstds): - # Good pixels - gdp = stdstars.ivar[ii, :] > 0. - icalib = fluxcalib.calib[stdfibers[ii]][gdp] - i_wave = fluxcalib.wave[gdp] - ZP_star = dsflux.ZP_from_calib(exptime, i_wave, icalib) - # Plot - if ii == 0: - lbl ='Individual stars' - else: - lbl = None - ax0.plot(i_wave, ZP_star, ':', label=lbl) - ax0.plot(fluxcalib.wave, ZP_AB, color='black', label='Median Calib') - - # Legend - legend = ax0.legend(loc='lower left', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Meta text - ax2 = plt.subplot(gs[1,1]) - ax2.set_axis_off() - show_meta(ax2, qaframe, 'FLUXCALIB', outfil) - - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - _ = makepath(outfil) - plt.savefig(outfil) - plt.close() - print('Wrote QA SkyRes file: {:s}'.format(outfil)) - - -def exposure_fluxcalib(outfil, qa_data): - """ QA plots for Flux calibration in an Exposure - - Args: - outfil: str -- Name of PDF file - qa_data: dict -- QA data, including that of the individual frames - """ - # Init - cameras = list(qa_data['frames'].keys()) - # Plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2, 2) - - # Loop on channel - clrs = dict(b='blue', r='red', z='purple') - for qq, channel in enumerate(['b','r','z']): - - ax = plt.subplot(gs[qq % 2, qq // 2]) - allc = [] - for camera in cameras: - if camera[0] == channel: - allc.append(int(camera[1])) - ax.errorbar([int(camera[1])], - [qa_data['frames'][camera]['FLUXCALIB']['METRICS']['ZP']], - yerr=[qa_data['frames'][camera]['FLUXCALIB']['METRICS']['RMS_ZP']], - capthick=2, fmt='o', color=clrs[channel]) - - - # - #ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - #ax0.plot([xmin,xmax], [0., 0], '--', color='gray') - ax.set_ylabel('ZP_AB') - #import pdb; pdb.set_trace() - ax.set_xlim(np.min(allc)-0.2, np.max(allc)+0.2) - ax.set_xlabel('Spectrograph') - #med0 = np.maximum(np.abs(np.median(med_res)), 1.) 
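# Both flux-calibration plots in this stretch summarize the calibration vector as
# an AB zeropoint ZP_AB. A sketch of the underlying photometric relation (the
# standard definition, not the exact internals of desispec's ZP_from_calib):
# the zeropoint is the AB magnitude of a source giving one count per second, so
# mag_AB = ZP - 2.5*log10(counts/exptime).

import numpy as np

def zeropoint(counts, exptime, mag_ab):
    """Zeropoint implied by a star of known AB mag giving `counts` in `exptime` seconds."""
    return mag_ab + 2.5 * np.log10(counts / exptime)

print(zeropoint(counts=1.0e5, exptime=1000.0, mag_ab=18.0))  # 23.0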
- #ax0.set_ylim(-5.*med0, 5.*med0) - #ax0.text(0.5, 0.85, 'Sky Meanspec', - # transform=ax_flux.transAxes, ha='center') - - # Meta text - #ax2 = plt.subplot(gs[1,1]) - #ax2.set_axis_off() - #show_meta(ax2, qaframe, 'FLUXCALIB', outfil) - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - _ = makepath(outfil) - plt.savefig(outfil) - plt.close() - print('Wrote QA FluxCalib Exposure file: {:s}'.format(outfil)) - - -def frame_fiberflat(outfil, qaframe, frame, fiberflat): - """ QA plots for fiber flat - - Args: - outfil: - qaframe: - frame: - fiberflat: - clobber: bool, optional - - Returns: - Stuff? - """ - from desimodel.focalplane import fiber_area_arcsec2 - # Setup - fibermap = frame.fibermap - gdp = fiberflat.mask == 0 - nfiber = len(frame.fibers) - xfiber = np.zeros(nfiber) - yfiber = np.zeros(nfiber) - for ii,fiber in enumerate(frame.fibers): - mt = np.where(fiber == fibermap['FIBER'])[0] - xfiber[ii] = fibermap['FIBERASSIGN_X'][mt] - yfiber[ii] = fibermap['FIBERASSIGN_Y'][mt] - area = fiber_area_arcsec2(xfiber,yfiber) - mean_area = np.mean(area) - - jet = cm = plt.get_cmap('jet') - - # Tile plot(s) - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,2) - - # Mean Flatfield flux in each fiber - ax = plt.subplot(gs[0,0]) - ax.xaxis.set_major_locator(plt.MultipleLocator(100.)) - - mean_flux = np.mean(frame.flux*gdp, axis=1) / fiber_area_arcsec2(xfiber,yfiber) - rms_mean = np.std(mean_flux) - med_mean = np.median(mean_flux) - #from xastropy.xutils import xdebug as xdb - #pdb.set_trace() - mplt = ax.scatter(xfiber, yfiber, marker='o', s=9., c=mean_flux, cmap=jet) - mplt.set_clim(vmin=med_mean-2*rms_mean, vmax=med_mean+2*rms_mean) - cb = fig.colorbar(mplt) - cb.set_label('Mean Flux') - - # Mean - ax = plt.subplot(gs[0,1]) - ax.xaxis.set_major_locator(plt.MultipleLocator(100.)) - mean_norm = np.mean(fiberflat.fiberflat*gdp,axis=1) / (area/mean_area) - m2plt = ax.scatter(xfiber, yfiber, marker='o', s=9., c=mean_norm, cmap=jet) - #m2plt.set_clim(vmin=0.98, vmax=1.02) - cb = fig.colorbar(m2plt) - cb.set_label('Mean of Fiberflat') - - # RMS - ax = plt.subplot(gs[1,0]) - ax.xaxis.set_major_locator(plt.MultipleLocator(100.)) - rms = np.std(gdp*(fiberflat.fiberflat- - np.outer(mean_norm, np.ones(fiberflat.nwave))),axis=1) - rplt = ax.scatter(xfiber, yfiber, marker='o', s=9., c=rms, cmap=jet) - #rplt.set_clim(vmin=0.98, vmax=1.02) - cb = fig.colorbar(rplt) - cb.set_label('RMS in Fiberflat') - - # Meta text - ax2 = plt.subplot(gs[1,1]) - ax2.set_axis_off() - show_meta(ax2, qaframe, 'FIBERFLAT', outfil) - """ - xlbl = 0.05 - ylbl = 0.85 - i0 = outfil.rfind('/') - ax2.text(xlbl, ylbl, outfil[i0+1:], color='black', transform=ax2.transAxes, ha='left') - yoff=0.10 - for key in sorted(qaframe.data['FIBERFLAT']['METRICS'].keys()): - if key in ['QA_FIG']: - continue - # Show - ylbl -= yoff - ax2.text(xlbl+0.05, ylbl, key+': '+str(qaframe.data['FIBERFLAT']['METRICS'][key]), - transform=ax2.transAxes, ha='left', fontsize='x-small') - """ - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - _ = makepath(outfil) - plt.savefig(outfil) - plt.close() - print('Wrote QA SkyRes file: {:s}'.format(outfil)) - -def frame_s2n(s2n_dict, outfile, rescut=0.2, verbose=True): - """ - Plot S/N diagnostics for a given frame - Replaces a previous-QL script - - Args: - s2n_dict (dict): dictionary of qa outputs repackaged a bit - outfile (str): output png filename - rescut (float, optional): only plot residuals (+/-) less than rescut - """ - from desispec.qa.qalib import s2n_flux_astro - # Plot - 
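# frame_fiberflat() above colors the focal-plane scatter by a per-fiber statistic
# and clips the color scale to median +/- 2*rms so a handful of outlier fibers do
# not wash out the map. A minimal matplotlib sketch of that clipping, with fake
# fiber positions and values:

import numpy as np
import matplotlib
matplotlib.use('Agg')              # non-interactive backend, as set_backend() does above
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
x, y = rng.uniform(-400.0, 400.0, (2, 500))    # fake focal-plane positions
metric = rng.normal(1.0, 0.02, 500)
metric[:5] = 3.0                               # a few bad fibers

med, rms = np.median(metric), np.std(metric)
sc = plt.scatter(x, y, marker='o', s=9.0, c=metric, cmap='jet')
sc.set_clim(vmin=med - 2 * rms, vmax=med + 2 * rms)   # outliers saturate; map stays readable
plt.colorbar(sc, label='Mean Flux')
plt.savefig('fiberflat_map.png')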
stypes = get_sty_otype() - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,6) - plt.suptitle("Signal/Noise after {}, Camera: {}, ExpID: {}".format( - s2n_dict['PANAME'], s2n_dict['CAMERA'], s2n_dict['EXPID']), fontsize=10, y=0.99) - cmap = plt.get_cmap('RdBu') - - # Unpack a bit - objlist = s2n_dict['METRICS']['OBJLIST'] - nfibers = len(s2n_dict['METRICS']['MEDIAN_SNR']) - - # Loop over object types - resids = np.zeros(nfibers) - mags, snrs = [], [] - ss = 0 - for oid, otype in enumerate(objlist): - # Truncate - if ss > 5: - continue - mag = s2n_dict["METRICS"]["SNR_MAG_TGT"][oid][1] - snr = s2n_dict["METRICS"]["SNR_MAG_TGT"][oid][0] - mags += mag - snrs += snr - - # Residuals - fibers = s2n_dict['METRICS']['%s_FIBERID' % otype] - coeff = np.array(s2n_dict['METRICS']['FITCOEFF_TGT'][oid]) - if np.any(np.isnan(coeff)): - continue - amag = np.array(mag) - flux = 10 ** (-0.4 * (amag - 22.5)) - fit_snr = s2n_flux_astro(flux, *coeff) * s2n_dict['METRICS']['EXPTIME'] ** (1 / 2) - resids[fibers] = (s2n_dict['METRICS']['MEDIAN_SNR'][fibers] - fit_snr) / fit_snr - - # Object fits - ax_obj = plt.subplot(gs[0, ss]) - # Scatter - if otype in stypes.keys(): - clr = stypes[otype]['color'] - else: - clr = 'gray' - ax_obj.scatter(amag, snr, s=1, color=clr) - - xval = np.linspace(np.min(amag), np.max(amag)) - xflux = 10 ** (-0.4 * (xval - 22.5)) - ax_obj.plot(xval, s2n_flux_astro(xflux, *coeff) * s2n_dict['METRICS']['EXPTIME'] ** (1/2), - color='k') - #ax_obj.set_xlabel('{:s} mag ({:s})'.format(otype, s2n_dict['METRICS']['FIT_FILTER'])) - ax_obj.set_xlabel('{:s}'.format(otype)) - ax_obj.set_ylabel('S/N') - ax_obj.set_yscale('log') - # Increment - ss += 1 - - # Median S/N - ax_S2N = plt.subplot(gs[1, 0:3]) - - gdfibers = s2n_dict['METRICS']['MEDIAN_SNR'] > 0. - ax_S2N.scatter(np.arange(nfibers)[gdfibers], s2n_dict['METRICS']['MEDIAN_SNR'][gdfibers]) - #import pdb; pdb.set_trace() - ax_S2N.set_xlabel('Fiber #') - ax_S2N.set_ylabel('Median S/N') - ax_S2N.set_yscale('log', nonposy='clip') - ax_S2N.set_ylim(0.01, 100.) - - # RA, DEC with residuals - ax_res = plt.subplot(gs[1, 3:]) - ax_res.set_title('Residual SNR: (calculated SNR - fit SNR) / fit SNR', fontsize=8) - resid_plot = ax_res.scatter(s2n_dict['METRICS']['RA'], s2n_dict['METRICS']['DEC'], - s=2, c=resids, cmap=cmap, vmin=-rescut, vmax=rescut) - fig.colorbar(resid_plot, ticks=[-rescut, 0., rescut]) - #fig.colorbar(resid_plot, ticks=[np.min(resids), 0, np.max(resids)]) - ax_res.set_xlabel('RA') - ax_res.set_ylabel('DEC') - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - _ = makepath(outfile) - plt.savefig(outfile) - if verbose: - print("Wrote: {:s}".format(outfile)) - plt.close() - -def exposure_fiberflat(channel, expid, metric, outfile=None): - """ Generate an Exposure level plot of a FiberFlat metric - Args: - channel: str, e.g. 
'b', 'r', 'z' - expid: int - metric: str, allowed entires are: ['meanflux'] - - Returns: - - """ - from desispec.io.meta import find_exposure_night, findfile - from desispec.io.frame import read_meta_frame, read_frame - from desispec.io.fiberflat import read_fiberflat - from desimodel.focalplane import fiber_area_arcsec2 - log = get_logger() - # Find exposure - night = find_exposure_night(expid) - # Search for frames with the input channel - frame0 = findfile('frame', camera=channel+'0', night=night, expid=expid) - if not os.path.exists(frame0): - log.fatal("No Frame 0 for channel={:s} and expid={:d}".format(channel, expid)) - # Confirm frame is a Flat - fmeta = read_meta_frame(frame0) - assert fmeta['FLAVOR'].strip() == 'flat' - # Load up all the frames - x,y,metrics = [],[],[] - for wedge in range(10): - # Load - frame_file = findfile('frame', camera=channel+'{:d}'.format(wedge), night=night, expid=expid) - fiber_file = findfile('fiberflat', camera=channel+'{:d}'.format(wedge), night=night, expid=expid) - try: - frame = read_frame(frame_file) - except: - continue - else: - fiberflat = read_fiberflat(fiber_file) - fibermap = frame.fibermap - gdp = fiberflat.mask == 0 - # X,Y - x.append([fibermap['FIBERASSIGN_X']]) - y.append([fibermap['FIBERASSIGN_Y']]) - area = fiber_area_arcsec2(x[-1], y[-1]) - mean_area = np.mean(area) - # Metric - if metric == 'meanflux': - mean_norm = np.mean(fiberflat.fiberflat*gdp,axis=1) / (area / mean_area) - metrics.append([mean_norm]) - # Cocatenate - x = np.concatenate(x) - y = np.concatenate(y) - metrics = np.concatenate(metrics) - # Plot - if outfile is None: - outfile='qa_{:08d}_{:s}_fiberflat.png'.format(expid, channel) - exposure_map(x,y,metrics, mlbl='Mean Flux', - title='Mean Flux for Exposure {:08d}, Channel {:s}'.format(expid, channel), - outfile=outfile) - - -def exposure_map(x,y,metric,mlbl=None, outfile=None, title=None, - ax=None, fig=None, psz=9., cmap=None, vmnx=None): - """ Generic method used to generated Exposure level QA - One channel at a time - - Args: - x: list or ndarray - y: list or ndarray - metric: list or ndarray - mlbl: str, optional - outfile: str, optional - title: str, optional - """ - # Tile plot(s) - if ax is None: - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(1,1) - ax = plt.subplot(gs[0]) - # - if cmap is None: - cmap = plt.get_cmap('jet') - if mlbl is None: - mlbl = 'Metric' - - # Mean Flatfield flux in each fiber - ax.set_aspect('equal', 'datalim') - if title is not None: - ax.set_title(title) - - mplt = ax.scatter(x,y,marker='o', s=psz, c=metric.reshape(x.shape), cmap=cmap) - #mplt.set_clim(vmin=med_mean-2*rms_mean, vmax=med_mean+2*rms_mean) - if fig is not None: - cb = fig.colorbar(mplt) - cb.set_label(mlbl) - # - if vmnx is not None: - mplt.set_clim(vmin=vmnx[0], vmax=vmnx[1]) - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - _ = makepath(outfile) - plt.savefig(outfile) - print('Wrote QA SkyRes file: {:s}'.format(outfile)) - plt.close() - - -def exposure_s2n(qa_exp, metric, outfile='exposure_s2n.png', verbose=True, - specprod_dir=None): - """ Generate an Exposure level plot of a S/N metric - Args: - qa_exp: QA_Exposure - metric: str, allowed entires are: ['resid'] - specprod_dir: str, optional - - Returns: - - """ - from desispec.io.meta import find_exposure_night, findfile - from desispec.io.frame import read_meta_frame, read_frame - - log = get_logger() - - cclrs = get_channel_clrs() - - # Find exposure - night = find_exposure_night(qa_exp.expid, 
specprod_dir=specprod_dir) - - - # Plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(6,2) - cmap = plt.get_cmap('RdBu') - - # Prep - qa_exp.qa_s2n['X'] = 0. - qa_exp.qa_s2n['Y'] = 0. - - # Load up all the frames - for ss,channel in enumerate(['b','r','z']): - # ax - if channel != 'z': - ax = plt.subplot(gs[0:3, ss]) - else: - ax = plt.subplot(gs[-3:, 1]) - - # Load up X, Y - for wedge in range(10): - # Load - camera=channel+'{:d}'.format(wedge) - frame_file = findfile('frame', camera=camera, night=night, expid=qa_exp.expid, - specprod_dir=specprod_dir) - try: - frame = read_frame(frame_file) - except: - continue - fibermap = frame.fibermap - # - rows = np.where(qa_exp.qa_s2n['CAMERA'] == camera)[0] - qa_exp.qa_s2n['X'][rows] = [fibermap['FIBERASSIGN_X'][qa_exp.qa_s2n['FIBER'][rows]]] - qa_exp.qa_s2n['Y'][rows] = [fibermap['FIBERASSIGN_Y'][qa_exp.qa_s2n['FIBER'][rows]]] - - # Metric - if metric == 'resid': - rows = (qa_exp.qa_s2n['CHANNEL'] == channel) & (qa_exp.qa_s2n['RESID'] != -999.) - metrics = qa_exp.qa_s2n['RESID'][rows] - else: - log.error("NOT READY FOR THIS QA METRIC") - # Unpack for plotting - x = qa_exp.qa_s2n['X'][rows] - y = qa_exp.qa_s2n['Y'][rows] - mags = qa_exp.qa_s2n['MAGS'][rows] - s2n = qa_exp.qa_s2n['MEDIAN_SNR'][rows] - - # Exposure - exposure_map(x,y, metrics, mlbl='S/N '+metric, ax=ax, fig=fig, - title=None, outfile=None, psz=1., cmap=cmap, vmnx=[-0.9,0.9]) - # Label - ax.text(0.05, 0.9, channel, color=cclrs[channel], transform=ax.transAxes, ha='left') - - # Scatter + fit - ax_summ = plt.subplot(gs[-3+ss,0]) - ax_summ.scatter(mags, s2n, color=cclrs[channel], s=1.) - if ss < 2: - ax_summ.get_xaxis().set_ticks([]) - # Axes - ax_summ.set_yscale('log', nonposy='clip') - if ss == 1: - ax_summ.set_ylabel('S/N') - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - _ = makepath(outfile) - plt.savefig(outfile) - if verbose: - print("Wrote: {:s}".format(outfile)) - plt.close() - - - - -def show_meta(ax, qaframe, qaflavor, outfil): - """ Show meta data on the figure - - Args: - ax: matplotlib.ax - qaframe: QA_Frame - qaflavor: str - - Returns: - """ - # Meta - xlbl = 0.05 - ylbl = 0.85 - yoff=0.10 - i0 = outfil.rfind('/') - ax.text(xlbl, ylbl, outfil[i0+1:], color='black', transform=ax.transAxes, ha='left') - # Night - ylbl -= yoff - ax.text(xlbl+0.1, ylbl, f'Night: {qaframe.night}', - transform=ax.transAxes, ha='left', fontsize='x-small') - # Rest - for key in sorted(qaframe.qa_data[qaflavor]['METRICS'].keys()): - if key in ['QA_FIG']: - continue - # Show - ylbl -= yoff - ax.text(xlbl+0.1, ylbl, key+': '+str(qaframe.qa_data[qaflavor]['METRICS'][key]), - transform=ax.transAxes, ha='left', fontsize='x-small') - - -def get_sty_otype(): - """Styles for plots""" - sty_otype = dict(ELG={'color':'green', 'lbl':'ELG'}, - LRG={'color':'red', 'lbl':'LRG'}, - STAR={'color':'black', 'lbl':'STAR'}, - QSO={'color':'blue', 'lbl':'QSO'}, - QSO_L={'color':'blue', 'lbl':'QSO z>2.1'}, - QSO_T={'color':'cyan', 'lbl':'QSO z<2.1'}) - return sty_otype - - -def prod_channel_hist(qa_prod, qatype, metric, xlim=None, outfile=None, pp=None, close=True): - """ Generate a series of histrograms (one per channel) - - Args: - qa_prod: QA_Prod class - qatype: str - metric: str - xlim: tuple, optional - outfile: str, optional - pp: PdfPages, optional - close: bool, optional - - Returns: - - """ - log = get_logger() - # Setup - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,2) - - # Loop on channel - clrs = get_channel_clrs() - for qq, channel in enumerate(['b', 'r', 
'z']): - ax = plt.subplot(gs[qq]) - #ax.xaxis.set_major_locator(plt.MultipleLocator(100.)) - - # Grab QA - qa_tbl = qa_prod.get_qa_table(qatype, metric, channels=channel) - # Check for nans - qa_arr = qa_tbl[metric] - isnan = np.isnan(qa_arr) - if np.sum(isnan) > 0: - log.error("NAN in qatype={:s}, metric={:s} for channel={:s}".format( - qatype, metric, channel)) - qa_arr[isnan] = -999. - # Histogram - ax.hist(qa_arr, color=clrs[channel]) - #import pdb; pdb.set_trace() - # Label - ax.text(0.05, 0.85, channel, color='black', transform=ax.transAxes, ha='left') - ax.set_xlabel('{:s} :: {:s}'.format(qatype,metric)) - if xlim is not None: - ax.set_xlim(xlim) - - # Meta - ''' - ax = plt.subplot(gs[3]) - ax.set_axis_off() - xlbl = 0.05 - ylbl = 0.85 - yoff = 0.1 - ax.text(xlbl, ylbl, qa_prod.prod_name, color='black', transform=ax.transAxes, ha='left') - nights = list(qa_tbl['NIGHT']) - # - ylbl -= yoff - ax.text(xlbl+0.1, ylbl, 'Nights: {}'.format(nights), - transform=ax.transAxes, ha='left', fontsize='x-small') - # - ylbl -= yoff - expids = list(qa_tbl['EXPID']) - ax.text(xlbl+0.1, ylbl, 'Exposures: {}'.format(expids), - transform=ax.transAxes, ha='left', fontsize='x-small') - ''' - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - -def prod_time_series(qa_multi, qatype, metric, outfile=None, close=True, pp=None, - bright_dark=0, exposures=False, night=None, horiz_line=None): - """ Generate a time series plot for a production - Can be MJD or Exposure number - - Args: - qa_multi: QA_Prod or QA_Night - qatype: str - metric: str - outfile: str, optional - close: bool, optional - pp: - bright_dark: int, optional; (flag: 0=all; 1=bright; 2=dark) - night: str, optional - Only used for the Title - horiz_line: float, optional - Draw a horizontal line at input value - """ - - log = get_logger() - - # Setup - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(3,1) - - - # Loop on channel - clrs = get_channel_clrs() - - # Grab QA - all_times = [] - all_ax = [] - for cc, channel in enumerate(['b','r','z']): - ax = plt.subplot(gs[cc]) - qa_tbl = qa_multi.get_qa_table(qatype, metric, channels=channel) - if len(qa_tbl) == 0: - log.info("QA Table is empty.. Maybe you input an incorrect metric?") - continue - ''' - # Check for nans - isnan = np.isnan(qa_arr) - if np.sum(isnan) > 0: - log.error("NAN in qatype={:s}, metric={:s} for channel={:s}".format( - qatype, metric, channel)) - qa_arr[isnan] = -999. - ''' - # Convert Date to MJD - atime = Time(qa_tbl['DATE-OBS'], format='isot', scale='utc') - atime.format = 'mjd' - mjd = atime.value - - # Bright dark - if bright_dark == 0: # All - pass - elif bright_dark == 1: # Bright - log.info("Using a bright/dark kludge for now") - bright = qa_tbl['EXPTIME'] < 1200. - qa_tbl = qa_tbl[bright] - mjd = mjd[bright] - elif bright_dark == 2: # Dark - log.info("Using a bright/dark kludge for now") - dark = qa_tbl['EXPTIME'] > 1200. - qa_tbl = qa_tbl[dark] - mjd = mjd[dark] - - # Scatter me - if exposures: - xval = qa_tbl['EXPID'] - plt.xticks(rotation=90) - else: - xval = mjd - ax.scatter(xval, qa_tbl[metric], color=clrs[channel], s=4.) 
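# prod_time_series() converts the DATE-OBS header strings to MJD via astropy's
# Time object before plotting, exactly as in the atime lines above. That
# conversion in isolation (the dates here are made up):

from astropy.time import Time

atime = Time(['2024-10-17T03:14:15', '2024-10-18T02:00:00'], format='isot', scale='utc')
atime.format = 'mjd'
print(atime.value)   # [60600.135... 60601.083...] -- one MJD per exposure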
- # Camera - ax.text(0.05, 0.85, channel, - transform=ax.transAxes, fontsize=13., ha='left', color=clrs[channel]) - # Axes - ax.set_ylabel('Metric') - if cc < 2: - ax.get_xaxis().set_ticks([]) - if cc == 0: - title = '{:s} :: {:s}'.format(qatype,metric) - if night is not None: - title = str(night)+' '+title - # - ax.set_title(title) - # Horizontal line? - if horiz_line is not None: - ax.axhline(horiz_line, color='gray', ls='--') - # Append - all_times.append(mjd) - all_ax.append(ax) - - # Label - #ax.text(0.05, 0.85, channel, color='black', transform=ax.transAxes, ha='left') - if exposures: - ax.set_xlabel('EXPID') - else: - ax.set_xlabel('MJD') - all_times = np.concatenate(all_times) - xmin, xmax = np.min(all_times), np.max(all_times) - for cc in range(3): - all_ax[cc].set_xlim(xmin,xmax) - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile) - print("Wrote QA file: {:s}".format(outfile)) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - - -def prod_avg_s2n(qa_prod, outfile=None, optypes=['ELG'], xaxis='MJD', - fiducials=None): - """ - Generate a plot summarizing average S/N in a production - for a few object types in a few cameras - - Args: - qa_prod: QA_Prod object - outfile: str, optional - Output file name - optypes: list, optional - List of fiducial objects to show - Options are: ELG, LRG, QSO - xaxis: str, optional - Designate x-axis. Options are: MJD, expid, texp - fiducials: - - Returns: - - """ - - markers = {'b': '*', 'r': 's', 'z': 'o'} - # Hard-code metrics for now - if fiducials is None: - fiducials = {} - fiducials['ELG'] = dict(otype='ELG', channel='r', ref_mag=23., color='g') - fiducials['LRG'] = dict(otype='LRG', channel='z', ref_mag=22., color='r') - fiducials['QSO'] = dict(otype='QSO', channel='b', ref_mag=22, color='b') - # Grab em - oplots = [fiducials[ftype] for ftype in optypes] - nplots = len(oplots) - - # Calculate - SN_vals = [[] for i in range(nplots)] - SN_sig = [[] for i in range(nplots)] - mjds = [[] for i in range(nplots)] - expids = [[] for i in range(nplots)] - texps = [[] for i in range(nplots)] - dates = [[] for i in range(nplots)] - - # Loop on exposure - for qaexp in qa_prod.qa_exps: - if qaexp.qa_s2n is None or len(qaexp.qa_s2n) == 0: - continue - # Loop on objects to plot - for itype, oplot in enumerate(oplots): - gdobj = qaexp.qa_s2n['OBJTYPE'] == oplot['otype'] - if not np.any(gdobj): - continue - # S/N - fit_snrs = [] - for wedge in range(10): - gdcam = qaexp.qa_s2n['CAMERA'] == '{:s}{:d}'.format(oplot['channel'], wedge) - rows = gdcam & gdobj - if not np.any(rows): - continue - # Grab the first one; should be the same for all - idx = np.where(rows)[0][0] - coeff = qaexp.qa_s2n['COEFFS'][idx,:] - # Evaluate - funcMap = s2n_funcs(exptime=qaexp.qa_s2n.meta['EXPTIME']) - fitfunc = funcMap['astro'] - flux = 10 ** (-0.4 * (oplot['ref_mag'] - 22.5)) - fit_snrs.append(fitfunc(flux, *coeff)) - SN_vals[itype].append(np.mean(fit_snrs)) - SN_sig[itype].append(np.std(fit_snrs)) - # Meta - dates[itype].append(qaexp.qa_s2n.meta['DATE-OBS']) - texps[itype].append(qaexp.qa_s2n.meta['EXPTIME']) - expids[itype].append(qaexp.expid) - - # A bit more prep - for itype, oplot in enumerate(oplots): - atime = Time(dates[itype], format='isot', scale='utc') - atime.format = 'mjd' - mjds[itype] = atime.value - - # Setup - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(1,1) - ax = plt.subplot(gs[0]) - - for itype, oplot in 
enumerate(oplots): - # Empty - if len(dates[itype]) == 0: - continue - # Nope, let's plot - if xaxis == 'MJD': - xval = mjds[itype] - elif xaxis == 'expid': - xval = expids[itype] - elif xaxis == 'texp': - xval = texps[itype] - # Plot - ax.errorbar(xval, SN_vals[itype], yerr=SN_sig[itype], label='{:s}: {:s} {:0.1f}'.format( - oplot['otype'], oplot['channel'], oplot['ref_mag']), ls='none', - color=oplot['color'], marker=markers[oplot['channel']]) - - ax.set_xlabel(xaxis) - ax.set_ylabel('') - legend = ax.legend(loc='upper right', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile) - print("Wrote QA file: {:s}".format(outfile)) - plt.close() - else: # Show - plt.show() - - -def prod_ZP(qa_prod, outfile=None, channels=('b','r','z'), xaxis='MJD'): - """ - Generate a plot summarizing the ZP for a production - - Args: - qa_prod: QA_Prod object - outfile: str, optional - Output file name - channels: tuple, optional - List of channels to show - xaxis: str, optional - Designate x-axis. Options are: MJD, expid, texp - - Returns: - - """ - markers = {'b': '*', 'r': 's', 'z': 'o'} - # Setup - nplots = len(channels) - ZP_vals = [[] for i in range(nplots)] - ZP_sig = [[] for i in range(nplots)] - mjds = [[] for i in range(nplots)] - expids = [[] for i in range(nplots)] - texps = [[] for i in range(nplots)] - dates = [[] for i in range(nplots)] - - # Loop on exposure - for ic, channel in enumerate(channels): - # Generate the table - ZP_tbl = qa_prod.get_qa_table('FLUXCALIB', 'ZP', channels=[channel]) - # Loop on expid - uni_expid = np.unique(ZP_tbl['EXPID']) - for expid in uni_expid: - rows = ZP_tbl['EXPID'] == expid - irow = np.where(rows)[0][0] - ZP_vals[ic].append(np.median(ZP_tbl["ZP"][rows].data)) - ZP_sig[ic].append(np.std(ZP_tbl["ZP"][rows].data)) - # Meta - dates[ic].append(ZP_tbl['DATE-OBS'][irow]) - texps[ic].append(ZP_tbl['EXPTIME'][irow]) - expids[ic].append(expid) - - # A bit more prep - for ic in range(nplots): - atime = Time(dates[ic], format='isot', scale='utc') - atime.format = 'mjd' - mjds[ic] = atime.value - - # Setup - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(1, 1) - ax = plt.subplot(gs[0]) - - for ic, channel in enumerate(channels): - # Empty - if len(dates[ic]) == 0: - continue - # Nope, let's plot - if xaxis == 'MJD': - xval = mjds[ic] - elif xaxis == 'expid': - xval = expids[ic] - elif xaxis == 'texp': - xval = texps[ic] - # Plot - ax.errorbar(xval, ZP_vals[ic], yerr=ZP_sig[ic], label=channel, - ls='none', color=get_channel_clrs()[channel], - marker=markers[channel]) - - ax.set_xlabel(xaxis) - ax.set_ylabel('ZP') - legend = ax.legend(loc='upper right', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Finish - plt.tight_layout(pad=0.1, h_pad=0.0, w_pad=0.0) - if outfile is not None: - plt.savefig(outfile) - print("Wrote QA file: {:s}".format(outfile)) - plt.close() - else: # Show - plt.show() - - -def skyline_resid(channel, sky_wave, sky_flux, sky_res, sky_ivar, outfile=None, pp=None, - close=True, dpi=700): - """ QA plot for residuals on sky lines - ala Julien Guy - Args: - sky_wave: - sky_flux: - sky_res: - outfile: - pp: - close: - nslices: - dpi: - - Returns: - - """ - # Grab the sky lines - sky_peaks = desi_params['qa']['skypeaks']['PARAMS']['{:s}_PEAKS'.format(channel.upper())] - npeaks = len(sky_peaks) - - # Collapse the sky data - #sky_wave = np.median(sky_wave, axis=0) - #sky_res = np.median(sky_res, axis=0) - #sky_ivar = 
np.median(sky_ivar, axis=0) - #sky_flux = np.median(sky_flux, axis=0) - - # Start the plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(npeaks, 1) - - wv_off = 15. - - clrs = dict(b='b', r='r', z='purple') - - # Loop on peaks - for ss,peak in enumerate(sky_peaks): - ax= plt.subplot(gs[ss]) - - # Zoom in - pix = np.abs(sky_wave[0,:]-peak) < wv_off - - # Calculate - orig = np.sqrt(np.mean(sky_ivar[:,pix] * sky_res[:,pix]**2, axis=0)) - lbl=r"$\sqrt{ }$" - if ss > 0: - lbl = None - ax.plot(sky_wave[0,pix], orig, color=clrs[channel], label=lbl) - - # Sky scaling - #lbl=r"$\sqrt{ < 1+(0.05 sky)^2/\sigma^2 > }$" - - # Labels - ax.set_ylabel(r'$n \sigma$') - #ax_flux.set_ylabel('Residual Flux') - ax.set_ylim(bottom=0.) - ax.axhline(1., color='gray', linestyle='dashed') - - if ss == 0: - legend = ax.legend(loc='upper left', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - # Finish - plt.tight_layout(pad=0.1, h_pad=0.0, w_pad=0.0) - if outfile is not None: - plt.savefig(outfile, dpi=dpi) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - -def skysub_resid_dual(sky_wave, sky_flux, sky_res, outfile=None, pp=None, - close=True, nslices=20, dpi=700): - """ Generate a plot of sky subtraction residuals - Typically for a given channel - Args: - wave: - sky_flux: - sky_res: - outfile: - pp: - close: - - Returns: - - """ - # Start the plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(2,1) - - # Wavelength - ax_wave = plt.subplot(gs[0]) - desiu_p.plot_slices(sky_wave, sky_res, np.min(sky_wave), np.max(sky_wave), - 0., num_slices=nslices, axis=ax_wave, scatter=False) - ax_wave.set_xlabel('Wavelength') - ax_wave.set_ylabel('Residual Flux') - - # Wavelength - ax_flux = plt.subplot(gs[1]) - desiu_p.plot_slices(sky_flux, sky_res, np.min(sky_flux), np.max(sky_flux), - 0., num_slices=nslices, axis=ax_flux, set_ylim_from_stats=True, scatter=False) - ax_flux.set_xlabel('log10(Sky Flux)') - ax_flux.set_ylabel('Residual Flux') - #ax_flux.set_ylim(-600, 100) - - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile, dpi=dpi) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - -def skysub_resid_series(sky_dict, xtype, outfile=None, pp=None, - close=True, nslices=20, dpi=700): - """ Generate a plot of sky subtraction residuals for a series of inputs - Typically for a given channel - Args: - wave: - sky_flux: - sky_res: - outfile: - pp: - close: - - Returns: - - """ - # Start the plot - fig = plt.figure(figsize=(8, 5.0)) - gs = gridspec.GridSpec(sky_dict['count'],1) - - for kk in range(sky_dict['count']): - sky_wave = sky_dict['wave'][kk] - sky_res = sky_dict['res'][kk] - sky_flux = sky_dict['skyflux'][kk] - ax = plt.subplot(gs[kk]) - #ax.set_ylabel('Residual Flux') - if xtype == 'wave': # Wavelength - desiu_p.plot_slices(sky_wave, sky_res, np.min(sky_wave), np.max(sky_wave), - 0., num_slices=nslices, axis=ax, scatter=False) - xlbl = 'Wavelength' - elif xtype == 'flux': # Flux - xlbl = 'log10(Sky Flux)' - desiu_p.plot_slices(sky_flux, sky_res, np.min(sky_flux), np.max(sky_flux), - 0., num_slices=nslices, axis=ax, set_ylim_from_stats=True, scatter=False) - if kk == sky_dict['count']-1: - ax.set_xlabel('Wavelength') - else: - ax.get_xaxis().set_ticks([]) - if kk == sky_dict['count']-1: - ax.set_xlabel(xlbl) - else: - ax.get_xaxis().set_ticks([]) - - # Finish - 
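# skyline_resid() above plots sqrt(mean(ivar * res**2)) around each sky peak: for
# correctly estimated errors this "n sigma" statistic scatters around 1, which is
# why the dashed horizontal line sits at 1. A numeric check on simulated residuals:

import numpy as np

rng = np.random.default_rng(1)
sigma = 5.0
res = rng.normal(0.0, sigma, size=(500, 40))      # residuals: 500 sky spectra, 40 pixels
ivar = np.full_like(res, 1.0 / sigma**2)          # matching inverse variance

nsigma = np.sqrt(np.mean(ivar * res**2, axis=0))  # one value per wavelength pixel
print(nsigma.round(2))                            # all close to 1.0 when errors are right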
plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile, dpi=dpi) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - -def skysub_gauss(sky_wave, sky_flux, sky_res, sky_ivar, outfile=None, pp=None, - close=True, binsz=0.1, dpi=700, nfbin=4): - """ Generate a plot examining the Gaussianity of the residuals - Typically for a given channel - Args: - wave: - sky_flux: - sky_res: - sky_ivar: - outfile: - pp: - close: - - Returns: - - """ - from scipy.stats import norm - # Deviates - gd_res = sky_ivar > 0. - devs = sky_res[gd_res] * np.sqrt(sky_ivar[gd_res]) - - # Start the plot - fig = plt.figure(figsize=(8, 4.0)) - gs = gridspec.GridSpec(1,2) - - # Histogram :: Same routine as in frame_skyresid - ax0 = plt.subplot(gs[0]) - i0, i1 = int( np.min(devs) / binsz) - 1, int( np.max(devs) / binsz) + 1 - rng = tuple(binsz*np.array([i0,i1]) ) - nbin = i1-i0 - hist, edges = np.histogram(devs, range=rng, bins=nbin) - - xhist = (edges[1:] + edges[:-1])/2. - ax0.hist(xhist, color='blue', bins=edges, weights=hist)#, histtype='step') - # PDF for Gaussian - area = binsz * np.sum(hist) - - xppf = np.linspace(scipy.stats.norm.ppf(0.000001), scipy.stats.norm.ppf(0.999999), 10000) - ax0.plot(xppf, area*scipy.stats.norm.pdf(xppf), 'r-', alpha=1.0) - ax0.set_xlabel(r'Res/$\sigma$') - ax0.set_ylabel('N') - - # Deviates vs. flux - absdevs = np.abs(devs) - asrt = np.argsort(absdevs) - absdevs.sort() - ndev = devs.size - ax1 = plt.subplot(gs[1]) - - # All - xlim = (0., np.max(absdevs)) - ylim = (0.000001, 1.) - ax1.plot(absdevs, 1-np.arange(ndev)/(ndev-1), 'k', label='All') - - # Bin by sky flux - sflux = sky_flux[asrt] - sky_flux.sort() - fbins = [0.] + [sky_flux[int(ii*ndev/nfbin)] for ii in range(1,nfbin)] - fbins += [np.max(sky_flux)] - # Adjust last bin to be likely on sky lines - if np.max(sky_flux) > 2000.: # Am assuming 2000 counts is a skyline - fbins[-2] = max(2000., fbins[-2]) - # Digitize - f_i = np.digitize(sflux, fbins) - 1 - - for kk in range(nfbin): - lbl = 'flux = [{:d},{:d}]'.format(int(fbins[kk]),int(fbins[kk+1])) - idx = f_i == kk - ncut = np.sum(idx) - ax1.plot(absdevs[idx], 1-np.arange(ncut)/(ncut-1), '--', label=lbl) - - # Gauss lines - for kk in range(1,int(xlim[1])+1): - ax1.plot([kk]*2, ylim, ':', color='gray') - icl = norm.cdf(kk) - norm.cdf(-1*kk) # Area under curve - ax1.plot(xlim, [1-icl]*2, ':', color='gray') - ax1.text(0.2, 1-icl, '{:d}'.format(kk)+r'$\sigma$', color='gray') - - ax1.set_xlabel(r'Res/$\sigma$') - ax1.set_ylabel(r'Fraction greater than Res/$\sigma$') - ax1.set_yscale("log", nonposy='clip') - ax1.set_ylim(ylim) - - legend = ax1.legend(loc='lower left', borderpad=0.3, - handletextpad=0.3, fontsize='small') - - - # Finish - plt.tight_layout(pad=0.1,h_pad=0.0,w_pad=0.0) - if outfile is not None: - plt.savefig(outfile, dpi=dpi) - if close: - plt.close() - elif pp is not None: - pp.savefig() - if close: - plt.close() - pp.close() - else: # Show - plt.show() - - -def get_channel_clrs(): - """ Simple dict to organize styles for channels - Returns: - channel_dict: dict - """ - return dict(b='blue', r='red', z='purple') diff --git a/deprecated/py/desispec/qa/qa_plots_ql.py b/deprecated/py/desispec/qa/qa_plots_ql.py deleted file mode 100644 index cc51b4654..000000000 --- a/deprecated/py/desispec/qa/qa_plots_ql.py +++ /dev/null @@ -1,756 +0,0 @@ -""" -desispec.qa.qa_plots_ql -======================= - -This includes routines to make pdf plots on the qa outputs from 
quicklook. - -For information on QA dictionaries used here as input, visit wiki page: -https://desi.lbl.gov/trac/wiki/Pipeline/QuickLook/QuicklookQAOutputs/Science -""" - -import numpy as np -from matplotlib import pyplot as plt -from matplotlib.gridspec import GridSpec -from matplotlib.ticker import FormatStrFormatter - -from desispec.qa import qalib -from desispec.qa.qalib import s2n_funcs -from desispec.quicklook.ql_plotlib import ql_qaplot - -def plot_countspectralbins(qa_dict,outfile,plotconf=None,hardplots=False): - """ - Plot count spectral bins. - - Args: - qa_dict: dictionary of qa outputs from running qa_quicklook.CountSpectralBins - outfile: Name of figure. - """ - #SE: this has become a useless plot for showing a constant number now---> prevented creating it for now until there is an actual plan for what to plot - camera = qa_dict["CAMERA"] - expid=qa_dict["EXPID"] - paname=qa_dict["PANAME"] - - thrcut=qa_dict["PARAMS"]["CUTBINS"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle("Fiber level check for flux after {}, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99) - goodfib=qa_dict["METRICS"]["GOOD_FIBERS"] - ngoodfib=qa_dict["METRICS"]["NGOODFIB"] - plt.plot(goodfib) - plt.ylim(-0.1,1.1) - plt.xlabel('Fiber #',fontsize=10) - plt.text(-0.5,1,r"NGOODFIB=%i"%(ngoodfib),ha='left',va='top',fontsize=10,alpha=2) - """ - gs=GridSpec(7,6) - ax1=fig.add_subplot(gs[:,:2]) - ax2=fig.add_subplot(gs[:,2:4]) - ax3=fig.add_subplot(gs[:,4:]) - - hist_med=ax1.bar(index,binslo,color='b',align='center') - ax1.set_xlabel('Fiber #',fontsize=10) - ax1.set_ylabel('Photon Counts > {:d}'.format(cutlo),fontsize=10) - ax1.tick_params(axis='x',labelsize=10) - ax1.tick_params(axis='y',labelsize=10) - ax1.set_xlim(0) - - hist_med=ax2.bar(index,binsmed,color='r',align='center') - ax2.set_xlabel('Fiber #',fontsize=10) - ax2.set_ylabel('Photon Counts > {:d}'.format(cutmed),fontsize=10) - ax2.tick_params(axis='x',labelsize=10) - ax2.tick_params(axis='y',labelsize=10) - ax2.set_xlim(0) - - hist_med=ax3.bar(index,binshi,color='g',align='center') - ax3.set_xlabel('Fiber #',fontsize=10) - ax3.set_ylabel('Photon Counts > {:d}'.format(cuthi),fontsize=10) - ax3.tick_params(axis='x',labelsize=10) - ax3.tick_params(axis='y',labelsize=10) - ax3.set_xlim(0) - """ - plt.tight_layout() - fig.savefig(outfile) - -def plot_countpix(qa_dict,outfile,plotconf=None,hardplots=False): - - """ - Plot pixel counts above some threshold - - Args: - qa_dict: qa dictionary from countpix qa - outfile: pdf file of the plot - """ - from desispec.util import set_backend - _matplotlib_backend = None - set_backend() - - expid=qa_dict["EXPID"] - camera = qa_dict["CAMERA"] - paname=qa_dict["PANAME"] - #npix_amp=np.array(qa_dict["METRICS"]["NPIX_AMP"]) - litfrac=np.array(qa_dict["METRICS"]["LITFRAC_AMP"]) - - cutthres=qa_dict["PARAMS"]["CUTPIX"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle("Fraction of pixels lit after {}, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99) - #ax1=fig.add_subplot(211) - #heatmap1=ax1.pcolor(npix_amp.reshape(2,2),cmap=plt.cm.OrRd) - ##plt.title('Total Pixels > {:d} sigma = {:f}'.format(cutthres,countlo), fontsize=10) - #ax1.set_xlabel("# pixels > {:d} sigma (per Amp)".format(cutthres),fontsize=10) - #ax1.tick_params(axis='x',labelsize=10,labelbottom=False) - 
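[Note: the commented-out panel here and the lit-fraction panel that follows both use the module's per-amplifier display idiom: a length-4 metric is reshaped into the 2x2 quadrant layout of the CCD amplifiers, rendered with `pcolor`, and annotated with one value per quadrant. A minimal sketch of that idiom; the helper name `plot_amp_heatmap` is hypothetical:

import numpy as np
import matplotlib.pyplot as plt

def plot_amp_heatmap(ax, values, label):
    """Draw a 2x2 amp map for a length-4 array of per-amp metric values."""
    values = np.asarray(values, dtype=float)
    ax.pcolor(values.reshape(2, 2), cmap=plt.cm.OrRd)
    ax.set_xlabel(label, fontsize=10)
    ax.tick_params(labelbottom=False, labelleft=False)
    # pcolor draws row 0 at the bottom, so amps 1 and 2 land in the lower
    # quadrants and amps 3 and 4 in the upper ones, matching the original code.
    positions = [(0.4, 0.4), (1.4, 0.4), (0.4, 1.4), (1.4, 1.4)]
    for i, (x, y) in enumerate(positions):
        ax.annotate("Amp {}\n{:.3f}".format(i + 1, values[i]), xy=(x, y), fontsize=10)

fig, ax = plt.subplots()
plot_amp_heatmap(ax, [0.12, 0.10, 0.14, 0.11], "Fraction over threshold (per Amp)")
fig.savefig("litfrac_amp.png")]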
#ax1.tick_params(axis='y',labelsize=10,labelleft=False) - # ax1.annotate("Amp 1\n{:f}".format(npix_amp[0]), - # xy=(0.4,0.4), - # fontsize=10 - # ) - #ax1.annotate("Amp 2\n{:f}".format(npix_amp[1]), - # xy=(1.4,0.4), - # fontsize=10 - # ) - #ax1.annotate("Amp 3\n{:f}".format(npix_amp[2]), - # xy=(0.4,1.4), - # fontsize=10 - # ) - #ax1.annotate("Amp 4\n{:f}".format(npix_amp[3]), - # xy=(1.4,1.4), - # fontsize=10 - # ) - ax2=fig.add_subplot(111) - heatmap2=ax2.pcolor(litfrac.reshape(2,2),cmap=plt.cm.OrRd) - ax2.set_xlabel("Fraction over {:d} sigma read noise(per Amp)".format(cutthres),fontsize=10) - ax2.tick_params(axis='x',labelsize=10,labelbottom=False) - ax2.tick_params(axis='y',labelsize=10,labelleft=False) - ax2.annotate("Amp 1\n{:f}".format(litfrac[0]), - xy=(0.4,0.4), - fontsize=10 - ) - ax2.annotate("Amp 2\n{:f}".format(litfrac[1]), - xy=(1.4,0.4), - fontsize=10 - ) - ax2.annotate("Amp 3\n{:f}".format(litfrac[2]), - xy=(0.4,1.4), - fontsize=10 - ) - ax2.annotate("Amp 4\n{:f}".format(litfrac[3]), - xy=(1.4,1.4), - fontsize=10 - ) - plt.tight_layout() - fig.savefig(outfile) - -def plot_bias_overscan(qa_dict,outfile,plotconf=None,hardplots=False): - - """ - Map of bias from overscan from 4 regions of CCD - - Args: - qa_dict: qa dictionary from bias_from_overscan qa - outfile : pdf file of the plot - """ - expid = qa_dict["EXPID"] - camera = qa_dict["CAMERA"] - paname = qa_dict["PANAME"] - params = qa_dict["PARAMS"] - exptime = qa_dict["EXPTIME"] - if exptime == 0.: - exptime = 1. - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - title="Bias from overscan region after {}, Camera: {}, ExpID: {}".format(paname,camera,expid) - plt.suptitle(title,fontsize=10,y=0.99) - ax1=fig.add_subplot(111) - ax1.set_xlabel("Avg. bias value per Amp (photon counts)",fontsize=10) - bias_amp=qa_dict["METRICS"]["BIAS_AMP"] - - heatmap1=ax1.pcolor(bias_amp.reshape(2,2),cmap=plt.cm.OrRd) - ax1.tick_params(axis='x',labelsize=10,labelbottom=False) - ax1.tick_params(axis='y',labelsize=10,labelleft=False) - ax1.annotate("Amp 1\n{:.3f}".format(bias_amp[0]/exptime), - xy=(0.4,0.4), - fontsize=10 - ) - ax1.annotate("Amp 2\n{:.3f}".format(bias_amp[1]/exptime), - xy=(1.4,0.4), - fontsize=10 - ) - ax1.annotate("Amp 3\n{:.3f}".format(bias_amp[2]/exptime), - xy=(0.4,1.4), - fontsize=10 - ) - ax1.annotate("Amp 4\n{:.3f}".format(bias_amp[3]/exptime), - xy=(1.4,1.4), - fontsize=10 - ) - fig.savefig(outfile) - -def plot_XWSigma(qa_dict,outfile,plotconf=None,hardplots=False): - - """ - Plot XWSigma - - Args: - qa_dict: qa dictionary from countpix qa - outfile : file of the plot - """ - camera=qa_dict["CAMERA"] - expid=qa_dict["EXPID"] - pa=qa_dict["PANAME"] - xsigma=np.array(qa_dict["METRICS"]["XWSIGMA_FIB"][0]) - wsigma=np.array(qa_dict["METRICS"]["XWSIGMA_FIB"][1]) - xsigma_med=qa_dict["METRICS"]["XWSIGMA"][0] - wsigma_med=qa_dict["METRICS"]["XWSIGMA"][1] - xfiber=np.arange(xsigma.shape[0]) - wfiber=np.arange(wsigma.shape[0]) - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle("X & W Sigma over sky peaks, Camera: {}, ExpID: {}".format(camera,expid),fontsize=10,y=0.99) - - ax1=fig.add_subplot(221) - hist_x=ax1.bar(xfiber,xsigma,align='center') - ax1.set_xlabel("Fiber #",fontsize=10) - ax1.set_ylabel("X std. dev. 
(# of pixels)",fontsize=10) - ax1.tick_params(axis='x',labelsize=10) - ax1.tick_params(axis='y',labelsize=10) - plt.xlim(0,len(xfiber)) - - ax2=fig.add_subplot(222) - hist_w=ax2.bar(wfiber,wsigma,align='center') - ax2.set_xlabel("Fiber #",fontsize=10) - ax2.set_ylabel("W std. dev. (# of pixels)",fontsize=10) - ax2.tick_params(axis='x',labelsize=10) - ax2.tick_params(axis='y',labelsize=10) - plt.xlim(0,len(wfiber)) - - if "XWSIGMA_AMP" in qa_dict["METRICS"]: - xsigma_amp=qa_dict["METRICS"]["XWSIGMA_AMP"][0] - wsigma_amp=qa_dict["METRICS"]["XWSIGMA_AMP"][1] - ax3=fig.add_subplot(223) - heatmap3=ax3.pcolor(xsigma_amp.reshape(2,2),cmap=plt.cm.OrRd) - plt.title('X Sigma = {:.4f}'.format(xsigma_med), fontsize=10) - ax3.set_xlabel("X std. dev. per Amp (# of pixels)",fontsize=10) - ax3.tick_params(axis='x',labelsize=10,labelbottom=False) - ax3.tick_params(axis='y',labelsize=10,labelleft=False) - ax3.annotate("Amp 1\n{:.3f}".format(xsigma_amp[0]), - xy=(0.4,0.4), - fontsize=10 - ) - ax3.annotate("Amp 2\n{:.3f}".format(xsigma_amp[1]), - xy=(1.4,0.4), - fontsize=10 - ) - ax3.annotate("Amp 3\n{:.3f}".format(xsigma_amp[2]), - xy=(0.4,1.4), - fontsize=10 - ) - ax3.annotate("Amp 4\n{:.3f}".format(xsigma_amp[3]), - xy=(1.4,1.4), - fontsize=10 - ) - - ax4=fig.add_subplot(224) - heatmap4=ax4.pcolor(wsigma_amp.reshape(2,2),cmap=plt.cm.OrRd) - plt.title('W Sigma = {:.4f}'.format(wsigma_med), fontsize=10) - ax4.set_xlabel("W std. dev. per Amp (# of pixels)",fontsize=10) - ax4.tick_params(axis='x',labelsize=10,labelbottom=False) - ax4.tick_params(axis='y',labelsize=10,labelleft=False) - ax4.annotate("Amp 1\n{:.3f}".format(wsigma_amp[0]), - xy=(0.4,0.4), - fontsize=10 - ) - ax4.annotate("Amp 2\n{:.3f}".format(wsigma_amp[1]), - xy=(1.4,0.4), - fontsize=10 - ) - ax4.annotate("Amp 3\n{:.3f}".format(wsigma_amp[2]), - xy=(0.4,1.4), - fontsize=10 - ) - ax4.annotate("Amp 4\n{:.3f}".format(wsigma_amp[3]), - xy=(1.4,1.4), - fontsize=10 - ) - - plt.tight_layout() - fig.savefig(outfile) - -def plot_RMS(qa_dict,outfile,plotconf=None,hardplots=False): - """ - Plot RMS - - Args: - qa_dict: dictionary of qa outputs from running qa_quicklook.Get_RMS - outfile: Name of plot output file - """ - camera=qa_dict["CAMERA"] - expid=qa_dict["EXPID"] - pa=qa_dict["PANAME"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - title="NOISE image counts per amplifier, Camera: {}, ExpID: {}".format(camera,expid) - rms_amp=qa_dict["METRICS"]["NOISE_AMP"] - ax1=fig.add_subplot(211) - - rms_over_amp=qa_dict["METRICS"]["NOISE_OVERSCAN_AMP"] - - plt.suptitle(title,fontsize=10,y=0.99) - heatmap1=ax1.pcolor(rms_amp.reshape(2,2),cmap=plt.cm.OrRd) - # ax1.set_xlabel("NOISE per Amp (photon counts)",fontsize=10) - ax1.tick_params(axis='x',labelsize=10,labelbottom=False) - ax1.tick_params(axis='y',labelsize=10,labelleft=False) - ax1.annotate("Amp 1\n{:.3f}".format(rms_amp[0]), - xy=(0.4,0.4), - fontsize=10 - ) - ax1.annotate("Amp 2\n{:.3f}".format(rms_amp[1]), - xy=(1.4,0.4), - fontsize=10 - ) - ax1.annotate("Amp 3\n{:.3f}".format(rms_amp[2]), - xy=(0.4,1.4), - fontsize=10 - ) - ax1.annotate("Amp 4\n{:.3f}".format(rms_amp[3]), - xy=(1.4,1.4), - fontsize=10 - ) - ax2=fig.add_subplot(212) - heatmap2=ax2.pcolor(rms_over_amp.reshape(2,2),cmap=plt.cm.OrRd) - ax2.set_xlabel("NOISE Overscan per Amp (photon counts)",fontsize=10) - ax2.tick_params(axis='x',labelsize=10,labelbottom=False) - ax2.tick_params(axis='y',labelsize=10,labelleft=False) - ax2.annotate("Amp 
1\n{:.3f}".format(rms_over_amp[0]), - xy=(0.4,0.4), - fontsize=10 - ) - ax2.annotate("Amp 2\n{:.3f}".format(rms_over_amp[1]), - xy=(1.4,0.4), - fontsize=10 - ) - ax2.annotate("Amp 3\n{:.3f}".format(rms_over_amp[2]), - xy=(0.4,1.4), - fontsize=10 - ) - ax2.annotate("Amp 4\n{:.3f}".format(rms_over_amp[3]), - xy=(1.4,1.4), - fontsize=10 - ) - fig.savefig(outfile) - -def plot_integral(qa_dict,outfile,plotconf=None,hardplots=False): - import matplotlib.ticker as ticker - """ - Plot integral. - - Args: - qa_dict: qa dictionary - outfile : output plot file - """ - expid=qa_dict["EXPID"] - camera=qa_dict["CAMERA"] - paname=qa_dict["PANAME"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - ax1=fig.add_subplot(111) - integral=np.array(qa_dict["METRICS"]["SPEC_MAGS"]) - plt.suptitle("Integrated Spectral Magnitudes, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99) - index=np.arange(len(integral)) - hist_med=ax1.bar(index,integral,color='b',align='center') - ax1.set_xlabel('Fibers',fontsize=10) - ax1.set_ylabel('Integral (photon counts)',fontsize=10) - ax1.tick_params(axis='x',labelsize=10) - ax1.tick_params(axis='y',labelsize=10) - ax1.xaxis.set_major_locator(ticker.AutoLocator()) - #ax1.set_xticklabels(std_fiberid) - - plt.tight_layout() - fig.savefig(outfile) - -def plot_sky_continuum(qa_dict,outfile,plotconf=None,hardplots=False): - - """ - Plot mean sky continuum from lower and higher wavelength range for each - fiber and accross amps. - - Args: - qa_dict: dictionary from sky continuum QA - outfile: pdf file to save the plot - """ - expid=qa_dict["EXPID"] - camera=qa_dict["CAMERA"] - paname=qa_dict["PANAME"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - title="Mean Sky Continuum after {}, Camera: {}, ExpID: {}".format(paname,camera,expid) - xtitle="SKY fiber ID" - ytitle="Sky Continuum (photon counts)" - skycont_fiber=np.array(qa_dict["METRICS"]["SKYCONT_FIBER"]) - fiberid=qa_dict["METRICS"]["SKYFIBERID"] - plt.suptitle(title,fontsize=10,y=0.99) - - ax1=fig.add_subplot(111) - index=np.arange(len(skycont_fiber)) - hist_med=ax1.bar(index,skycont_fiber,color='b',align='center') - ax1.set_xlabel(xtitle,fontsize=10) - ax1.set_ylabel(ytitle,fontsize=10) - ax1.tick_params(axis='x',labelsize=6) - ax1.tick_params(axis='y',labelsize=10) - ax1.set_xticks(index) - ax1.set_xticklabels(fiberid) - ax1.set_xlim(0) - - plt.tight_layout() - fig.savefig(outfile) - -def plot_sky_peaks(qa_dict,outfile,plotconf=None,hardplots=False): - - """ - Plot rms of sky peaks for smy fibers across amps - - Args: - qa_dict: dictionary from sky peaks QA - outfile: pdf file to save the plot - """ - - - expid=qa_dict["EXPID"] - camera=qa_dict["CAMERA"] - paname=qa_dict["PANAME"] - sumcount=qa_dict["METRICS"]["PEAKCOUNT_FIB"] - fiber=np.arange(sumcount.shape[0]) - skyfiber_rms=qa_dict["METRICS"]["PEAKCOUNT_NOISE"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle("Counts for Sky Fibers after {}, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99) - - ax1=fig.add_subplot(111) - hist_x=ax1.bar(fiber,sumcount,align='center') - ax1.set_xlabel("Fiber #",fontsize=10) - ax1.set_ylabel("Summed counts over sky peaks (photon counts)",fontsize=10) - ax1.tick_params(axis='x',labelsize=10) - 
ax1.tick_params(axis='y',labelsize=10) - plt.xlim(0,len(fiber)) - - plt.tight_layout() - fig.savefig(outfile) - -def plot_residuals(frame,qa_dict,outfile,plotconf=None,hardplots=False): - import random - """ - Plot one random sky subtracted, fiber flattened spectrum per object type - - Args: - frame: sframe object - qa_dict: qa dictionary - outfile : output plot file - """ - - expid=qa_dict["EXPID"] - camera = qa_dict["CAMERA"] - paname=qa_dict["PANAME"] - med_resid_fiber=qa_dict["METRICS"]["MED_RESID_FIBER"] - med_resid_wave=qa_dict["METRICS"]["MED_RESID_WAVE"] - wavelength=qa_dict["METRICS"]["WAVELENGTH"] - flux=frame.flux - objects=frame.fibermap["OBJTYPE"] - objtypes=list(set(objects)) - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle('Randomly selected sky subtracted, fiber flattenend spectra\ncamera {}, exposure, {}'.format(camera,expid),fontsize=10) - - for i in range(len(objtypes)): - ax=fig.add_subplot('23{}'.format(i+1)) - - objs=np.where(objects==objtypes[i])[0] - obj=random.choice(objs) - objflux=flux[obj] - - ax.set_xlabel('Wavelength (Angstroms)',fontsize=8) - ax.set_ylabel('{} Flux (counts)'.format(objtypes[i]),fontsize=8) - ax.tick_params(axis='x',labelsize=8) - ax.tick_params(axis='y',labelsize=8) - ax.plot(wavelength,objflux) - - plt.tight_layout() - plt.subplots_adjust(top=0.9) - - # gs=GridSpec(6,4) - # plt.suptitle("Sky Residuals after {}, Camera: {}, ExpID: {}".format(paname,camera,expid)) - # - # ax0=fig.add_subplot(gs[:2,2:]) - # ax0.set_axis_off() - # keys=["MED_RESID","NBAD_PCHI","NREJ","NSKY_FIB","RESID_PER"] - # skyfiberid=qa_dict["METRICS"]["SKYFIBERID"] - # - # xl=0.05 - # yl=0.9 - # for key in keys: - # ax0.text(xl,yl,key+': '+str(qa_dict["METRICS"][key]),transform=ax0.transAxes,ha='left',fontsize='x-small') - # yl=yl-0.1 - # - # ax1=fig.add_subplot(gs[:2,:2]) - # ax1.plot(wavelength, med_resid_wave,'b') - # ax1.set_ylabel("Med. Sky Res. (photon counts)",fontsize=10) - # ax1.set_xlabel("Wavelength(A)",fontsize=10) - # ax1.set_ylim(np.percentile(med_resid_wave,2.5),np.percentile(med_resid_wave,97.5)) - # ax1.set_xlim(np.min(wavelength),np.max(wavelength)) - # ax1.tick_params(axis='x',labelsize=10) - # ax1.tick_params(axis='y',labelsize=10) - # - # ax2=fig.add_subplot(gs[3:,:]) - # index=range(med_resid_fiber.shape[0]) - # hist_res=ax2.bar(index,med_resid_fiber,align='center') - # ax2.plot(index,np.zeros_like(index),'k-') - # #ax1.plot(index,med_resid_fiber,'bo') - # ax2.set_xlabel('Sky fiber ID',fontsize=10) - # ax2.set_ylabel('Med. Sky Res. (photon counts)',fontsize=10) - # ax2.tick_params(axis='x',labelsize=10) - # ax2.tick_params(axis='y',labelsize=10) - # ax2.set_xticks(index) - # ax2.set_xticklabels(skyfiberid) - # ax2.set_xlim(0) - # #plt.tight_layout() - - fig.savefig(outfile) - - -def plot_SNR(qa_dict,outfile,objlist,fitsnr,rescut=0.2,sigmacut=2.,plotconf=None,hardplots=False): - """ - Plot SNR - - Args: - qa_dict: dictionary of qa outputs from running qa_quicklook.Calculate_SNR - outfile: output png file - objlist: list of objtype for log(snr**2) vs. mag plots - badfibs: list of fibers with infs or nans to remove for plotting - fitsnr: list of snr vs. mag fitting coefficients # JXP -- THIS IS NOT TRUE!! 
- rescut: only plot residuals (+/-) less than rescut (default 0.2) - sigmacut: only plot residuals (+/-) less than sigma cut (default 2.0) - NOTE: rescut taken as default cut parameter - """ - med_snr=np.array(qa_dict["METRICS"]["MEDIAN_SNR"]) - avg_med_snr=np.mean(med_snr) - index=np.arange(med_snr.shape[0]) - resids= np.array(qa_dict["METRICS"]["SNR_RESID"]) - camera = qa_dict["CAMERA"] - expid=qa_dict["EXPID"] - paname=qa_dict["PANAME"] - - fig=plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - ra=[] - dec=[] - mags=[] - snrs=[] - # Loop over object types - for oid, otype in enumerate(objlist): - mag=qa_dict["METRICS"]["SNR_MAG_TGT"][oid][1] - snr=qa_dict["METRICS"]["SNR_MAG_TGT"][oid][0] - mags.append(mag) - snrs.append(snr) - - fibers = qa_dict['METRICS']['%s_FIBERID'%otype] - for c in range(len(fibers)): - ras = qa_dict['METRICS']['RA'][fibers[c]] - decs = qa_dict['METRICS']['DEC'][fibers[c]] - ra.append(ras) - dec.append(decs) - - if rescut is None and sigmacut is not None: - range_min = np.mean(resids) - sigmacut * np.std(resids) - range_max = np.mean(resids) + sigmacut * np.std(resids) - for ii in range(len(resids)): - if resids[ii] <= range_min: - resids[ii] = range_min - elif resids[ii] >= range_max: - resids[ii] = range_max - - if camera[0] == 'b': - thisfilter='DECAM_G' - elif camera[0] == 'r': - thisfilter='DECAM_R' - else: - thisfilter='DECAM_Z' - - plt.suptitle("Signal/Noise after {}, Camera: {}, ExpID: {}".format(paname,camera,expid),fontsize=10,y=0.99) - - rmneg=med_snr[med_snr>=0.] - rmind=index[med_snr>=0.] - - ax1=fig.add_subplot(221) - hist_med=ax1.semilogy(rmind,rmneg,linewidth=1) - ax1.set_xlabel('Fiber #',fontsize=6) - ax1.set_ylabel('Median S/N',fontsize=8) - ax1.tick_params(axis='x',labelsize=6) - ax1.tick_params(axis='y',labelsize=6) - ax1.set_xlim(0) - - ax2=fig.add_subplot(222) - ax2.set_title('Residual SNR: (calculated SNR - fit SNR) / fit SNR',fontsize=8) - ax2.set_xlabel('RA',fontsize=6) - ax2.set_ylabel('DEC',fontsize=6) - ax2.tick_params(axis='x',labelsize=6) - ax2.tick_params(axis='y',labelsize=6) - if rescut is not None: - resid_plot=ax2.scatter(ra,dec,s=2,c=resids,cmap=plt.cm.bwr,vmin=-rescut,vmax=rescut) - fig.colorbar(resid_plot,ticks=[-rescut,0.,rescut]) - else: - resid_plot=ax2.scatter(ra,dec,s=2,c=resids,cmap=plt.cm.bwr) - fig.colorbar(resid_plot,ticks=[np.min(resids),0,np.max(resids)]) - - for i,otype in enumerate(objlist): - ax=fig.add_subplot('24{}'.format(i+5)) - - objtype=objlist[i] - objid=np.where(np.array(objlist)==objtype)[0][0] - obj_mag=mags[objid] - obj_snr=snrs[objid] - plot_mag=sorted(obj_mag) - #plot_fit=np.array(fitsnr[objid])**2 - snr2=np.array(obj_snr)**2 - fitval=qa_dict["METRICS"]["FITCOEFF_TGT"][objid] - - # Calculate the model - flux = 10 ** (-0.4 * (np.array(plot_mag) - 22.5)) - funcMap = s2n_funcs(exptime=qa_dict['METRICS']['EXPTIME']) - fitfunc = funcMap['astro'] - plot_fit = fitfunc(flux, *fitval) - - # Plot - if i == 0: - ax.set_ylabel('Median S/N**2',fontsize=8) - ax.set_xlabel('{} Mag ({})\na={:.4f}, B={:.1f}'.format(objtype,thisfilter,fitval[0],fitval[1]),fontsize=6) - if otype == 'STAR': - ax.set_xlim(16,20) - elif otype == 'BGS' or otype == 'MWS': - ax.set_xlim(14,24) - elif otype == 'QSO': - ax.set_xlim(17,23) - else: - ax.set_xlim(20,25) - ax.tick_params(axis='x',labelsize=6) - ax.tick_params(axis='y',labelsize=6) - ax.semilogy(obj_mag,snr2,'b.',markersize=1) - ax.semilogy(plot_mag,plot_fit**2,'y',linewidth=1) - - 
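[Note: the model curve drawn just above is evaluated in flux space: magnitudes are converted with the zeropoint-22.5 (nanomaggies) relation, the fitted coefficients are applied, and the result is squared for the log S/N**2 panel. A compact sketch of that evaluation with a stand-in two-parameter model — the real functional form lives in `desispec.qa.qalib.s2n_funcs` and may differ:

import numpy as np

def snr_model(flux, a, b):
    # Hypothetical stand-in: source counts over sky-plus-read noise.
    # The deleted code uses qalib.s2n_funcs(exptime=...)['astro'] instead.
    return a * flux / np.sqrt(a * flux + b)

mags = np.linspace(18.0, 23.0, 50)
flux = 10 ** (-0.4 * (mags - 22.5))    # mag -> linear flux (nanomaggies)
a, b = 1.0, 50.0                       # made-up fit coefficients
snr2 = snr_model(flux, a, b) ** 2      # quantity plotted on the log axis]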
fig.savefig(outfile) - -def plot_lpolyhist(qa_dict,outfile,plotconf=None,hardplots=False): - """ - Plot histogram for each legendre polynomial coefficient in WSIGMA array. - - Args: - qa_dict: Dictionary of qa outputs from running qa_quicklook.Check_Resolution - outfile: Name of figure. - """ - paname = qa_dict["PANAME"] - p0 = qa_dict["DATA"]["LPolyCoef0"] - p1 = qa_dict["DATA"]["LPolyCoef1"] - p2 = qa_dict["DATA"]["LPolyCoef2"] - - fig = plt.figure() - - if plotconf: - hardplots=ql_qaplot(fig,plotconf,qa_dict,camera,expid,outfile) - - if not hardplots: - pass - else: - plt.suptitle("{} QA Legendre Polynomial Coefficient Histograms".format(paname)) - - # Creating subplots - ax1 = fig.add_subplot(311) - n1, bins1, patches1 = ax1.hist(p0, bins=20, ec='black') - ax1.set_xticks(bins1[::3]) - ax1.xaxis.set_major_formatter(FormatStrFormatter('%0.3f')) - ax1.set_xlabel('Zeroth Legendre Polynomial Coefficient (p0)') - ax1.set_ylabel('Frequency') - - ax2 = fig.add_subplot(312) - n2, bins2, patches2 = ax2.hist(p1, bins=20, ec='black') - ax2.set_xticks(bins2[::3]) - ax2.xaxis.set_major_formatter(FormatStrFormatter('%0.3f')) - ax2.set_xlabel('First Legendre Polynomial Coefficient (p1)') - ax2.set_ylabel('Frequency') - - ax3 = fig.add_subplot(313) - n3, bins3, patches3 = ax3.hist(p2, bins=20, ec='black') - ax3.set_xticks(bins3[::3]) - ax3.xaxis.set_major_formatter(FormatStrFormatter('%0.3f')) - ax3.set_xlabel('Second Legendre Polynomial Coefficient (p2)') - ax3.set_ylabel('Frequency') - - plt.tight_layout() - plt.subplots_adjust(top=0.92) - - fig.savefig(outfile) - diff --git a/deprecated/py/desispec/qa/qa_prod.py b/deprecated/py/desispec/qa/qa_prod.py deleted file mode 100644 index b8e275767..000000000 --- a/deprecated/py/desispec/qa/qa_prod.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -desispec.qa.qa_prod -=================== - -Class to organize QA for a full DESI production run. -""" - -from __future__ import print_function, absolute_import, division - -import numpy as np -import glob, os -import warnings - -from desispec.io import get_exposures -from desispec.io import get_files -from desispec.io import specprod_root -from desispec.io import get_nights -from .qa_multiexp import QA_MultiExp -from .qa_night import QA_Night -from desispec.io import write_qa_exposure - -from desiutil.log import get_logger - -# log = get_logger() - -from . import qa_multiexp - -class QA_Prod(qa_multiexp.QA_MultiExp): - def __init__(self, specprod_dir=None, **kwargs): - """ Class to organize and execute QA for a DESI production - - Args: - specprod_dir(str): Path containing the exposures/ directory to use. If the value - is None, then the value of :func:`specprod_root` is used instead. - Notes: - - Attributes: - qa_exps : list - List of QA_Exposure classes, one per exposure in production - data : dict - """ - if specprod_dir is None: - specprod_dir = specprod_root() - self.specprod_dir = specprod_dir - # Init - QA_MultiExp.__init__(self, specprod_dir=specprod_dir, **kwargs) - # Load up exposures for the full production - nights = get_nights(specprod_dir=self.specprod_dir) - for night in nights: - self.mexp_dict[night] = {} - for exposure in get_exposures(night, specprod_dir = self.specprod_dir): - # Object only?? 
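[Note: the loop being built here fills a production-wide lookup of frame files, keyed night -> exposure -> camera; `get_files` returns the innermost camera-to-filename dict for one exposure. An illustrative example of the resulting structure, with made-up nights, exposure IDs, and paths:

mexp_dict = {
    "20200315": {                    # night
        55: {"b0": "exposures/20200315/00000055/frame-b0-00000055.fits",
             "r0": "exposures/20200315/00000055/frame-r0-00000055.fits"},
        56: {},                      # exposure with no frame files found
    },
}]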
- frames_dict = get_files(filetype = str('frame'), night = night, - expid = exposure, specprod_dir = self.specprod_dir) - self.mexp_dict[night][exposure] = frames_dict - # Output file names - self.qaexp_outroot = self.qaprod_dir+'/'+self.prod_name+'_qa' - # Nights list - self.qa_nights = [] - - def load_data(self, inroot=None): - """ Load QA data from night objects on disk - """ - self.data = {} - # Load - for night in self.mexp_dict.keys(): - qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir) - qaNight.load_data() - # - self.data[night] = qaNight.data[night] - - def build_data(self): - """ Build QA data dict from the nights - """ - from desiutil.io import combine_dicts - # Loop on exposures - odict = {} - for qanight in self.qa_nights: - for qaexp in qanight.qa_exps: - # Get the exposure dict - idict = write_qa_exposure('foo', qaexp, ret_dict=True) - odict = combine_dicts(odict, idict) - # Finish - self.data = odict - - def slurp_nights(self, make_frameqa=False, remove=True, restrict_nights=None, - write_nights=False, **kwargs): - """ Slurp all the individual QA files, night by night - Loops on nights, generating QANight objects along the way - - Args: - make_frameqa: bool, optional - Regenerate the individual QA files (at the frame level first) - remove: bool, optional - Remove the individual QA files? - restrict_nights: list, optional - **kwargs: - Passed to make_frameqa() - - Returns: - - """ - log = get_logger() - # Remake? - if make_frameqa: - self.make_frameqa(**kwargs) - # Reset - log.info("Resetting QA_Night objects") - self.qa_nights = [] - # Loop on nights - for night in self.mexp_dict.keys(): - if restrict_nights is not None: - if night not in restrict_nights: - continue - qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir) - qaNight.slurp(remove=remove) - # Save nights - self.qa_nights.append(qaNight) - # Write? - if write_nights: - qaNight.write_qa_exposures() diff --git a/deprecated/py/desispec/qa/qa_quicklook.py b/deprecated/py/desispec/qa/qa_quicklook.py deleted file mode 100644 index 6b6984d2e..000000000 --- a/deprecated/py/desispec/qa/qa_quicklook.py +++ /dev/null @@ -1,2246 +0,0 @@ -""" -desispec.qa.qa_quicklook -======================== - -Monitoring algorithms for Quicklook pipeline. 
-""" - -import os,sys -import datetime -import numpy as np -import scipy.ndimage -import yaml -import re -import astropy.io.fits as fits -import desispec.qa.qa_plots_ql as plot -import desispec.quicklook.qlpsf -import desispec.qa.qa_plots_ql as fig -from desispec.quicklook.qas import MonitoringAlg, QASeverity -from desispec.quicklook import qlexceptions -from desispec.quicklook import qllogger -from desispec.quicklook.palib import resample_spec -from astropy.time import Time -from desispec.qa import qalib -from desispec.io import qa, read_params -from desispec.io.meta import findfile -from desispec.io.sky import read_sky -from desispec.image import Image as im -from desispec.frame import Frame as fr -from desispec.preproc import parse_sec_keyword -from desispec.util import runcmd -from desispec.qproc.qframe import QFrame -from desispec.fluxcalibration import isStdStar -from desitarget.targetmask import desi_mask -import astropy -from astropy.io import fits - -qlog=qllogger.QLLogger("QuickLook",0) -log=qlog.getlog() - -def get_inputs(*args,**kwargs): - ''' - Get inputs required for each QA - ''' - inputs={} - inputs["camera"]=kwargs["camera"] - - if "paname" not in kwargs: inputs["paname"]=None - else: inputs["paname"]=kwargs["paname"] - - if "ReferenceMetrics" in kwargs: inputs["refmetrics"]=kwargs["ReferenceMetrics"] - else: inputs["refmetrics"]=None - - inputs["amps"]=False - if "amps" in kwargs: inputs["amps"]=kwargs["amps"] - - if "param" in kwargs: inputs["param"]=kwargs["param"] - else: inputs["param"]=None - - inputs["psf"]=None - if "PSFFile" in kwargs: inputs["psf"]=kwargs["PSFFile"] - - inputs["fibermap"]=None - if "FiberMap" in kwargs: inputs["fibermap"]=kwargs["FiberMap"] - - if "Peaks" in kwargs: inputs["Peaks"]=kwargs["Peaks"] - - if "qafile" in kwargs: inputs["qafile"] = kwargs["qafile"] - else: inputs["qafile"]=None - - if "qafig" in kwargs: inputs["qafig"]=kwargs["qafig"] - else: inputs["qafig"]=None - - if "plotconf" in kwargs: inputs["plotconf"]=kwargs["plotconf"] - else: inputs["plotconf"]=None - - if "hardplots" in kwargs: inputs["hardplots"]=kwargs["hardplots"] - else: inputs["hardplots"]=False - - return inputs - -def get_image(filetype,night,expid,camera,specdir): - ''' - Make image object from file if in development mode - ''' - #- Find correct file for QA - imagefile = findfile(filetype,int(night),int(expid),camera,specprod_dir=specdir) - - #- Create necessary input for desispec.image - image = fits.open(imagefile) - pix = image['IMAGE'].data - ivar = image['IVAR'].data - mask = image['MASK'].data - readnoise = image['READNOISE'].data - meta = image['IMAGE'].header - - #- Create image object - imageobj = im(pix,ivar,mask=mask,readnoise=readnoise,camera=camera,meta=meta) - return imageobj - -def get_frame(filetype,night,expid,camera,specdir): - ''' - Make frame object from file if in development mode - ''' - #- Find correct file for QA - framefile = findfile(filetype,int(night),int(expid),camera,specprod_dir=specdir) - - #- Create necessary input for desispec.frame - frame = fits.open(framefile) - wave = frame['WAVE'].data - flux = frame['FLUX'].data - ivar = frame['IVAR'].data - fibermap = frame['FIBERMAP'].data - fibers = fibermap['FIBER'] - meta = frame['FLUX'].header - - #- Create frame object - frameobj = QFrame(wave,flux,ivar,fibers=fibers,fibermap=fibermap,meta=meta) - - return frameobj - - -class Check_HDUs(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="CHECKHDUS" - import astropy - 
rawtype=astropy.io.fits.hdu.hdulist.HDUList - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "CHECKHDUS" - status=kwargs['statKey'] if 'statKey' in kwargs else "CHECKHDUS_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - MonitoringAlg.__init__(self,name,rawtype,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - - if kwargs["singleqa"] == 'Check_HDUs': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - rawfile = findfile('raw',int(night),int(expid),camera,rawdata_dir=kwargs["rawdir"]) - raw = fits.open(rawfile) - else: raw=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(raw,inputs) - - def run_qa(self,raw,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - - rawimage=raw[camera.upper()].data - header=raw[camera.upper()].header - - retval={} - retval["EXPID"]= '{0:08d}'.format(header["EXPID"]) - retval["CAMERA"] = camera - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["FLAVOR"] = header["FLAVOR"] - #SE: quicklook to crash when a mismatched config file with the one in fits header - from desispec.scripts import quicklook - - args=quicklook.parse() - ad,fl = args.config.split("qlconfig_") - flvr = fl.split(".yaml")[0] - #if flvr in ['darksurvey','graysurvey','brightsurvey']: flvr = 'science' - if header["FLAVOR"] == 'science': - flvr = flvr.split("survey")[0] - if (header["FLAVOR"] == flvr or header["FLAVOR"] == format(flvr.upper()) or flvr == 'test'): - log.info("The correct configuration file is being used!") - else: - log.critical("Wrong configuration file is being used!") - sys.exit("Wrong configuration file! use the one for "+str(header["FLAVOR"])) - - elif (header["FLAVOR"] == flvr or flvr == 'test'): - log.info("The correct configuration file is being used!") - else: - log.critical("Wrong configuration file is being used!") - sys.exit("Wrong configuration file! 
use the one for "+str(header["FLAVOR"])) - - - if retval["FLAVOR"] == 'science': - retval["PROGRAM"] = header["PROGRAM"] - else: - pass - retval["NIGHT"] = header["NIGHT"] - kwargs=self.config['kwargs'] - - - HDUstat = "NORMAL" - EXPNUMstat = "NORMAL" - - param['EXPTIME'] = header["EXPTIME"] - - if camera != header["CAMERA"]: - log.critical("The raw FITS file is missing camera "+camera) - sys.exit("QuickLook Abort: CHECK THE RAW FITS FILE :"+rawfile) - HDUstat = 'ALARM' - - if header["EXPID"] != kwargs['expid'] : - log.critical("The raw FITS file is missing camera "+camera) - sys.exit("QuickLook Abort: EXPOSURE NUMBER DOES NOT MATCH THE ONE IN THE HEADER") - EXPNUMstat = "ALARM" - - - - if header["FLAVOR"] != "science" : - - retval["METRICS"] = {"CHECKHDUS_STATUS":HDUstat,"EXPNUM_STATUS":EXPNUMstat} - - else : - retval["METRICS"] = {"CHECKHDUS_STATUS":HDUstat,"EXPNUM_STATUS":EXPNUMstat} - param['SEEING'] = header["SEEING"] - param['AIRMASS'] = header["AIRMASS"] - param['PROGRAM'] = header["PROGRAM"] - - - retval["PARAMS"] = param - - if 'INHERIT' in header and header['INHERIT']: - h0 = raw[0].header - for key in h0: - if key not in header: - header[key] = h0[key] - - return retval - - def get_default_config(self): - return {} - - -class Trace_Shifts(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="XYSHIFTS" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "XYSHIFTS" - status=kwargs['statKey'] if 'statKey' in kwargs else "XYSHIFTS_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - if "XYSHIFTS_WARN_RANGE" in parms and "XYSHIFTS_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["XYSHIFTS_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["XYSHIFTS_NORMAL_RANGE"]),QASeverity.NORMAL)] - MonitoringAlg.__init__(self,name,im,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Trace_Shifts': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - image = get_image('preproc',night,expid,camera,kwargs["specdir"]) - else: image=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(image,inputs) - - def run_qa(self,image,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - - #- qa dictionary - retval={} - retval["PANAME" ]= paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = expid = '{0:08d}'.format(image.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = image.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if image.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - - retval["NIGHT"] = night = image.meta["NIGHT"] - - - if param is None: - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - # create xytraceset object - - from desispec.calibfinder import findcalibfile - from desispec.xytraceset import XYTraceSet - #SE: all next lines till the dashed line exist just so that we get the psf name without hardcoding any address -> there must be a better way - rawfile = findfile('raw',int(night),int(expid),camera,rawdata_dir=os.environ["QL_SPEC_DATA"]) - hdulist=fits.open(rawfile) - primary_header=hdulist[0].header - camera_header =hdulist[camera].header - hdulist.close() - #-------------------------------------------------------- - psffile=findcalibfile([camera_header,primary_header],"PSF") - psf=fits.open(psffile) - xcoef=psf['XTRACE'].data - ycoef=psf['YTRACE'].data - wavemin=psf["XTRACE"].header["WAVEMIN"] - wavemax=psf["XTRACE"].header["WAVEMAX"] - npix_y=image.meta['NAXIS2'] - psftrace=XYTraceSet(xcoef,ycoef,wavemin,wavemax,npix_y=npix_y) - - # compute dx and dy - from desispec.trace_shifts import compute_dx_from_cross_dispersion_profiles as compute_dx - from desispec.trace_shifts import compute_dy_using_boxcar_extraction as compute_dy - fibers=np.arange(500) #RS: setting nfibers to 500 for now - ox,oy,odx,oex,of,ol=compute_dx(xcoef,ycoef,wavemin,wavemax,image,fibers=fibers) - x_for_dy,y_for_dy,ody,ey,fiber_for_dy,wave_for_dy,dwave,dwave_err=compute_dy(psftrace,image,fibers) - - # return average shifts in x and y - dx=np.mean(odx) - dy=np.mean(ody) - xyshift=np.array([dx,dy]) - - retval["METRICS"]={"XYSHIFTS":xyshift} - retval["PARAMS"]=param - - #get_outputs(qafile,qafig,retval,'plot_traceshifts') -# outfile = qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - return retval - - def get_default_config(self): - return {} - - -class Bias_From_Overscan(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="BIAS_OVERSCAN" - - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "BIAS_AMP" - status=kwargs['statKey'] if 'statKey' in kwargs else "BIAS_AMP_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "BIAS_WARN_RANGE" in parms and 
"BIAS_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["BIAS_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["BIAS_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - - MonitoringAlg.__init__(self,name,im,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Bias_From_Overscan': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - image = get_image('preproc',night,expid,camera,kwargs["specdir"]) - else: image=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(image,inputs) - - def run_qa(self,image,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - retval={} - retval["EXPID"] = '{0:08d}'.format(image.meta["EXPID"]) - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["CAMERA"] = camera - retval["NIGHT"] = image.meta["NIGHT"] - retval["FLAVOR"] = flavor = image.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if image.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["EXPTIME"] = image.meta["EXPTIME"] - - - if retval["FLAVOR"] == 'arc': - pass - else: - retval["FLAVOR"] = image.meta["FLAVOR"] - retval["NIGHT"] = image.meta["NIGHT"] - kwargs=self.config['kwargs'] - - #SE: this would give the desispec version stored in DEPVER07 key of the raw simulated fits file :0.16.0.dev1830 - #RS: don't look for this if not using simulated files, differences in simulated headers vs. 
data headers cause this to crash - if flavor == 'science': - param['FITS_DESISPEC_VERSION'] = image.meta['DEPVER07'] - import desispec - from desispec import quicklook - param['PROC_DESISPEC_VERSION']= desispec.__version__ - param['PROC_QuickLook_VERSION']= quicklook.__qlversion__ - - - if 'INHERIT' in image.meta and image.meta['INHERIT']: - - h0 = image.meta - #h0 = header - for key in h0: - if key not in image.meta: - image.meta[key] = h0[key] - - #RS: look for values in simulated data, if not found try finding data values - try: - bias_overscan = [image.meta['OVERSCN1'],image.meta['OVERSCN2'],image.meta['OVERSCN3'],image.meta['OVERSCN4']] - except: - bias_overscan = [image.meta['OVERSCNA'],image.meta['OVERSCNB'],image.meta['OVERSCNC'],image.meta['OVERSCND']] - - bias = np.mean(bias_overscan) - - if param is None: - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - - retval["PARAMS"] = param - - if amps: - bias_amps=np.array(bias_overscan) - retval["METRICS"]={'BIAS_AMP':bias_amps} - else: - #retval["METRICS"]={'BIAS':bias,"DIFF1SIG":diff1sig,"DIFF2SIG":diff2sig,"DIFF3SIG":diff3sig,"DATA5SIG":data5sig,"BIAS_ROW":mean_row} - retval["METRICS"]={} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_bias_overscan(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Get_RMS(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="RMS" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "NOISE_AMP" - status=kwargs['statKey'] if 'statKey' in kwargs else "NOISE_AMP_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "NOISE_WARN_RANGE" in parms and "NOISE_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["NOISE_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["NOISE_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - - MonitoringAlg.__init__(self,name,im,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Get_RMS': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - image = get_image('preproc',night,expid,camera,kwargs["specdir"]) - else: image=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(image,inputs) - - def run_qa(self,image,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - retval={} - retval["EXPID"] = '{0:08d}'.format(image.meta["EXPID"]) - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["CAMERA"] = camera - retval["FLAVOR"] = flavor = image.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if flavor == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = image.meta["NIGHT"] - - # return rms values in rms/sqrt(exptime) - #rmsccd=qalib.getrms(image.pix/np.sqrt(image.meta["EXPTIME"])) #- should we add dark current and/or readnoise to this as well? - #rmsccd = np.mean([image.meta['RDNOISE1'],image.meta['RDNOISE2'],image.meta['RDNOISE3'],image.meta['RDNOISE4']]) #--> "NOISE":rmsccd - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - retval["PARAMS"] = param - - #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - # SE: this section is moved from BIAS_FROM_OVERSCAN to header - - data=[] - row_data_amp1=[] - row_data_amp2=[] - row_data_amp3=[] - row_data_amp4=[] - bias_patnoise=[] - #bias_overscan=[] - #RS: loop through amps based on header info - loop_amps = get_amp_ids(image.meta) - exptime=image.meta["EXPTIME"] - if exptime == 0.: - exptime = 1. - for kk in loop_amps: - sel=parse_sec_keyword(image.meta['BIASSEC'+kk]) - #- Obtain counts/second in bias region -# pixdata=image[sel]/header["EXPTIME"] - pixdata=image.pix[sel]/exptime - if kk == '1' or kk == 'A': - for i in range(pixdata.shape[0]): - row_amp1=pixdata[i] - row_data_amp1.append(row_amp1) - if kk == '2' or kk == 'B': - - for i in range(pixdata.shape[0]): - row_amp2=pixdata[i] - row_data_amp2.append(row_amp2) - if kk == '3' or kk == 'C': - - for i in range(pixdata.shape[0]): - row_amp3=pixdata[i] - row_data_amp3.append(row_amp3) - if kk == '4' or kk == 'D': - - for i in range(pixdata.shape[0]): - row_amp4=pixdata[i] - row_data_amp4.append(row_amp4) - #- Compute statistics of the bias region that only reject - # the 0.5% of smallest and largest values. 
(from sdssproc) - isort=np.sort(pixdata.ravel()) - nn=isort.shape[0] - bias=np.mean(isort[int(0.005*nn) : int(0.995*nn)]) - #bias_overscan.append(bias) - data.append(isort) - - #- Combine data from each row per amp and take average - # BIAS_ROW = mean_row - median_row_amp1=[] - for i in range(len(row_data_amp1)): - median=np.median(row_data_amp1[i]) - median_row_amp1.append(median) - - rms_median_row_amp1= np.std(median_row_amp1) - try: - noise1 = image.meta['RDNOISE1'] - except: - noise1 = image.meta['OBSRDNA'] - bias_patnoise.append(rms_median_row_amp1/noise1) - - median_row_amp2=[] - for i in range(len(row_data_amp2)): - median=np.median(row_data_amp2[i]) - median_row_amp2.append(median) - - rms_median_row_amp2= np.std(median_row_amp2) - try: - noise2 = image.meta['RDNOISE2'] - except: - noise2 = image.meta['OBSRDNB'] - bias_patnoise.append(rms_median_row_amp2/noise2) - - - median_row_amp3=[] - for i in range(len(row_data_amp3)): - median=np.median(row_data_amp3[i]) - median_row_amp3.append(median) - - rms_median_row_amp3= np.std(median_row_amp3) - try: - noise3 = image.meta['RDNOISE3'] - except: - noise3 = image.meta['OBSRDNC'] - bias_patnoise.append(rms_median_row_amp3/noise3) - - median_row_amp4=[] - for i in range(len(row_data_amp4)): - median=np.median(row_data_amp4[i]) - median_row_amp4.append(median) - - rms_median_row_amp4= np.std(median_row_amp4) - try: - noise4 = image.meta['RDNOISE4'] - except: - noise4 = image.meta['OBSRDND'] - bias_patnoise.append(rms_median_row_amp4/noise4) - - - #- Calculate upper and lower bounds of 1, 2, and 3 sigma - full_data=np.concatenate((data[0],data[1],data[2],data[3])).ravel() - sig1_lo = np.percentile(full_data,50.-(param['PERCENTILES'][0]/2.)) - sig1_hi = np.percentile(full_data,50.+(param['PERCENTILES'][0]/2.)) - sig2_lo = np.percentile(full_data,50.-(param['PERCENTILES'][1]/2.)) - sig2_hi = np.percentile(full_data,50.+(param['PERCENTILES'][1]/2.)) - sig3_lo = np.percentile(full_data,50.-(param['PERCENTILES'][2]/2.)) - sig3_hi = np.percentile(full_data,50.+(param['PERCENTILES'][2]/2.)) - - #- Find difference between upper and lower sigma bounds - # DIFF1SIG: The number of counts separating the 1 sigma percentiles in the noise distribution (from the overscan region) - diff1sig = sig1_hi - sig1_lo - # DIFF2SIG: The number of counts separating 2 or 3 sigma in the noise distribution - diff2sig = sig2_hi - sig2_lo - diff3sig = sig3_hi - sig3_lo - - #-DATA5SIG: number of pixels more than 5 sigma below the bias level - sig5_value = np.percentile(full_data,3e-5) - data5sig = len(np.where(full_data <= sig5_value)[0]) - - #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% - - if amps: - rms_over_amps = [noise1,noise2,noise3,noise4] - try: - rms_amps = [image.meta['OBSRDN1'],image.meta['OBSRDN2'],image.meta['OBSRDN3'],image.meta['OBSRDN4']] - except: - rms_amps = [image.meta['OBSRDNA'],image.meta['OBSRDNB'],image.meta['OBSRDNC'],image.meta['OBSRDND']] - retval["METRICS"]={"NOISE_AMP":np.array(rms_amps),"NOISE_OVERSCAN_AMP":np.array(rms_over_amps),"DIFF1SIG":diff1sig,"DIFF2SIG":diff2sig,"DATA5SIG":data5sig,"BIAS_PATNOISE":bias_patnoise}#,"NOISE_ROW":noise_row,"EXPNUM_WARN":expnum,"NOISE_OVER":rmsover - else: - retval["METRICS"]={"DIFF1SIG":diff1sig,"DIFF2SIG":diff2sig,"DATA5SIG":data5sig, "BIAS_PATNOISE":bias_patnoise} # Dropping "NOISE_OVER":rmsover,"NOISE_ROW":noise_row,"EXPNUM_WARN":expnum - - - ############################################################### - # This section is for adding QA 
metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_RMS(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Calc_XWSigma(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="XWSIGMA" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "XWSIGMA" - status=kwargs['statKey'] if 'statKey' in kwargs else "XWSIGMA_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "XWSIGMA_WARN_RANGE" in parms and "XWSIGMA_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["XWSIGMA_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["XWSIGMA_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,im,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Calc_XWSigma': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - image = get_image('preproc',night,expid,camera,kwargs["specdir"]) - else: image=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(image,inputs) - - def run_qa(self,image,inputs): - import desispec.quicklook.qlpsf - from scipy.optimize import curve_fit - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - psffile=inputs["psf"] - psf=desispec.quicklook.qlpsf.PSF(psffile) - amps=inputs["amps"] - allpeaks=inputs["Peaks"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(image.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = image.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if image.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=program=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = image.meta["NIGHT"] - - if param is None: - log.critical("No parameter is given for this QA! 
") - sys.exit("Check the configuration file") - - retval["PARAMS"] = param - #- Ensure that the QA will run even if 500 spectra aren't present - if fibermap['FIBER'].shape[0] >= 500: - fibers = 500 - else: - fibers = fibermap['FIBER'].shape[0] - - #- Define number of pixels to be fit - dp=param['PIXEL_RANGE']/2 - #- Get wavelength ranges around peaks - peaks=allpeaks['{}_PEAKS'.format(camera[0].upper())] - #- Maximum allowed fit sigma value - maxsigma=param['MAX_SIGMA'] - - xfails=[] - wfails=[] - xsigma=[] - wsigma=[] - xsigma_amp1=[] - wsigma_amp1=[] - xsigma_amp2=[] - wsigma_amp2=[] - xsigma_amp3=[] - wsigma_amp3=[] - xsigma_amp4=[] - wsigma_amp4=[] - - for fiber in range(fibers): - - xs = -1 # SE: this prevents crash in "XWSIGMA_AMP" for when xs or ws is empty list -> try b9 of 20200515/00000001 - ws = -1 - xsig=[] - wsig=[] - for peak in range(len(peaks)): - #- Use psf information to convert wavelength to pixel values - xpixel=desispec.quicklook.qlpsf.PSF.x(psf,ispec=fiber,wavelength=peaks[peak])[0][0] - ypixel=desispec.quicklook.qlpsf.PSF.y(psf,ispec=fiber,wavelength=peaks[peak])[0][0] - #- Find x and y pixel values around sky lines - xpix_peak=np.arange(int(xpixel-dp),int(xpixel+dp),1) - ypix_peak=np.arange(int(ypixel-dp),int(ypixel+dp),1) - #- Fit gaussian to counts in pixels around sky line - #- If any values fail, store x/w, wavelength, and fiber - try: - xpopt,xpcov=curve_fit(qalib.gauss,np.arange(len(xpix_peak)),image.pix[int(ypixel),xpix_peak]) - xs=np.abs(xpopt[2]) - if xs <= maxsigma: - xsig.append(xs) - else: - xfail=[fiber,peaks[peak]] - xfails.append(xfail) - except: - xfail=[fiber,peaks[peak]] - xfails.append(xfail) - pass - try: - wpopt,wpcov=curve_fit(qalib.gauss,np.arange(len(ypix_peak)),image.pix[ypix_peak,int(xpixel)]) - ws=np.abs(wpopt[2]) - if ws <= maxsigma: - wsig.append(ws) - else: - wfail=[fiber,peaks[peak]] - wfails.append(wfail) - except: - wfail=[fiber,peaks[peak]] - wfails.append(wfail) - pass - - #- Excluding fibers 240-260 in case some fibers overlap amps - #- Excluding peaks in the center of image in case peak overlaps two amps - #- This shouldn't cause a significant loss of information - - if amps: - - if fibermap['FIBER'][fiber]<240: - if ypixel < 2000.: - xsigma_amp1.append(xs) - wsigma_amp1.append(ws) - if ypixel > 2100.: - xsigma_amp3.append(xs) - wsigma_amp3.append(ws) - - if fibermap['FIBER'][fiber]>260: - if ypixel < 2000.: - xsigma_amp2.append(xs) - wsigma_amp2.append(ws) - if ypixel > 2100.: - xsigma_amp4.append(xs) - wsigma_amp4.append(ws) - - - if len(xsig)!=0: - xsigma.append(np.mean(xsig)) - if len(wsig)!=0: - wsigma.append(np.mean(wsig)) - - if fibermap['FIBER'].shape[0]<260: - xsigma_amp2=[] - xsigma_amp4=[] - wsigma_amp2=[] - wsigma_amp4=[] - - #- Calculate desired output metrics - xsigma_med=np.median(np.array(xsigma)) - wsigma_med=np.median(np.array(wsigma)) - xsigma_amp=np.array([np.median(xsigma_amp1),np.median(xsigma_amp2),np.median(xsigma_amp3),np.median(xsigma_amp4)]) - wsigma_amp=np.array([np.median(wsigma_amp1),np.median(wsigma_amp2),np.median(wsigma_amp3),np.median(wsigma_amp4)]) - xwfails=np.array([xfails,wfails]) - - - #SE: mention the example here when the next lines are ineffective and when they are effective in removing the NaN from XWSIGMA_AMP--> XWSIGMA itself no longer includes any NaN value. 
As we both know, this is not the way to properly deal with NaNs -->let's see if switching to non-scipy fuction would bring about a better solution - if len(xsigma)==0: - xsigma=[param['XWSIGMA_{}_REF'.format(program.upper())][0]] - - if len(wsigma)==0: - wsigma=[param['XWSIGMA_{}_REF'.format(program.upper())][1]] - - #- Combine metrics for x and w - xwsigma_fib=np.array((xsigma,wsigma)) #- (2,nfib) - xwsigma_med=np.array((xsigma_med,wsigma_med)) #- (2) - xwsigma_amp=np.array((xsigma_amp,wsigma_amp)) - - if amps: - #if len(xsigma_amp1)==0 : - #xsigma_amp1 = [param['XWSIGMA_REF'][0]] - #if len(xsigma_amp2)==0 : - #xsigma_amp2 = [param['XWSIGMA_REF'][0]] - #if len(xsigma_amp3)==0 : - #xsigma_amp3 = [param['XWSIGMA_REF'][0]] - #if len(xsigma_amp4)==0 : - #xsigma_amp4 = [param['XWSIGMA_REF'][0]] - - #if len(wsigma_amp1)==0 : - #wsigma_amp1 = [param['XWSIGMA_REF'][1]] - #if len(wsigma_amp2)==0 : - #wsigma_amp2 = [param['XWSIGMA_REF'][1]] - #if len(wsigma_amp3)==0 : - #wsigma_amp3 = [param['XWSIGMA_REF'][1]] - #if len(wsigma_amp4)==0 : - #wsigma_amp4 = [param['XWSIGMA_REF'][1]] - - retval["METRICS"]={"XWSIGMA":xwsigma_med,"XWSIGMA_FIB":xwsigma_fib,"XWSIGMA_AMP":xwsigma_amp}#,"XWSHIFT":xwshift,"XWSHIFT_AMP":xwshift_amp,"XWSIGMA_SHIFT": xwsigma_shift} - else: - retval["METRICS"]={"XWSIGMA":xwsigma_med,"XWSIGMA_FIB":xwsigma_fib}#,"XWSHIFT":xwshift,"XWSIGMA_SHIFT": xwsigma_shift} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_XWSigma(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Count_Pixels(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="COUNTPIX" - from desispec.image import Image as im - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "LITFRAC_AMP" - status=kwargs['statKey'] if 'statKey' in kwargs else "LITFRAC_AMP_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "LITFRAC_AMP_WARN_RANGE" in parms and "LITFRAC_AMP_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["LITFRAC_AMP_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["LITFRAC_AMP_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,im,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Count_Pixels': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - image = get_image('preproc',night,expid,camera,kwargs["specdir"]) - else: image=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(image,inputs) - - def run_qa(self,image,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(image.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = image.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if image.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - - retval["NIGHT"] = image.meta["NIGHT"] - - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - - retval["PARAMS"] = param - - #- get the counts for each amp - npix_amps=[] - litfrac_amps=[] - - from desispec.preproc import parse_sec_keyword - #RS: loop through amps based on header info - try: - header_test=parse_sec_keyword(image.meta['CCDSEC1']) - loop_amps=['1','2','3','4'] - except: - loop_amps=['A','B','C','D'] - #- get amp boundary in pixels - for kk in loop_amps: - ampboundary=parse_sec_keyword(image.meta["CCDSEC"+kk]) - try: - rdnoise_thisamp=image.meta["RDNOISE"+kk] - except: - rdnoise_thisamp=image.meta["OBSRDN"+kk] - npix_thisamp= image.pix[ampboundary][image.pix[ampboundary] > param['CUTPIX'] * rdnoise_thisamp].size #- no of pixels above threshold - npix_amps.append(npix_thisamp) - size_thisamp=image.pix[ampboundary].size - litfrac_thisamp=round(np.float64(npix_thisamp)/size_thisamp,2) #- fraction of pixels getting light above threshold - litfrac_amps.append(litfrac_thisamp) - # retval["METRICS"]={"NPIX_AMP",npix_amps,'LITFRAC_AMP': litfrac_amps} - retval["METRICS"]={"LITFRAC_AMP": litfrac_amps} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_countpix(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class CountSpectralBins(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="COUNTBINS" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "NGOODFIB" - status=kwargs['statKey'] if 'statKey' in kwargs else "NGOODFIB_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "NGOODFIB_WARN_RANGE" in parms and "NGOODFIB_NORMAL_RANGE" in parms: - 
kwargs["RANGES"]=[(np.asarray(parms["NGOODFIB_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["NGOODFIB_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'CountSpectralBins': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('frame',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - psf=inputs["psf"] - qafile=inputs["qafile"] - qafig=None #inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - #- qa dictionary - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = frame.meta["NIGHT"] - - grid=np.gradient(frame.wave) - if not np.all(grid[0]==grid[1:]): - log.debug("grid_size is NOT UNIFORM") - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - - retval["PARAMS"] = param - #- get the effective readnoise for the fibers - #- readnoise per fib = readnoise per pix * sqrt(box car width)* sqrt(no. of bins in the amp) * binsize/pix size scale - nspec=frame.nspec - rdnoise_fib=np.zeros(nspec) - if nspec > 250: #- upto 250 - amp 1 and 3, beyond that 2 and 4 - rdnoise_fib[:250]=[(frame.meta['RDNOISE1']+frame.meta['RDNOISE3'])*np.sqrt(5.)*np.sqrt(frame.flux.shape[1]/2)*frame.meta['WAVESTEP']/0.5]*250 - rdnoise_fib[250:]=[(frame.meta['RDNOISE2']+frame.meta['RDNOISE4'])*np.sqrt(5.)*np.sqrt(frame.flux.shape[1]/2)*frame.meta['WAVESTEP']/0.5]*(nspec-250) - else: - rdnoise_fib=[(frame.meta['RDNOISE1']+frame.meta['RDNOISE3'])*np.sqrt(5.)*np.sqrt(frame.flux.shape[1]/2)*frame.meta['WAVESTEP']/0.5]*nspec - threshold=[param['CUTBINS']*ii for ii in rdnoise_fib] - #- compare the flux sum to threshold - - totcounts=frame.flux.sum(axis=1) - passfibers=np.where(totcounts>threshold)[0] - ngoodfibers=passfibers.shape[0] - good_fibers=np.array([0]*frame.nspec) - good_fibers[passfibers]=1 #- assign 1 for good fiber - - #- leaving the amps granularity needed for caching as defunct. If needed in future, this needs to be propagated through. 
- amps=False - leftmax=None - rightmax=None - bottommax=None - topmin=None - - if amps: #- leaving this for now - leftmax,rightmin,bottommax,topmin = qalib.fiducialregion(frame,psf) - retval["LEFT_MAX_FIBER"]=int(leftmax) - retval["RIGHT_MIN_FIBER"]=int(rightmin) - retval["BOTTOM_MAX_WAVE_INDEX"]=int(bottommax) - retval["TOP_MIN_WAVE_INDEX"]=int(topmin) - - retval["METRICS"]={"NGOODFIB": ngoodfibers, "GOOD_FIBERS": good_fibers, "TOTCOUNT_FIB": totcounts} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_countspectralbins(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Sky_Continuum(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SKYCONT" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "SKYCONT" - status=kwargs['statKey'] if 'statKey' in kwargs else "SKYCONT_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "SKYCONT_WARN_RANGE" in parms and "SKYCONT_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["SKYCONT_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["SKYCONT_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Sky_Continuum': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('fframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - #- qa dictionary - retval={} - retval["PANAME" ]= paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = frame.meta["NIGHT"] - - camera=frame.meta["CAMERA"] - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - - wrange1=param["{}_CONT".format(camera[0].upper())][0] - wrange2=param["{}_CONT".format(camera[0].upper())][1] - - retval["PARAMS"] = param - - skyfiber, contfiberlow, contfiberhigh, meancontfiber, skycont = qalib.sky_continuum( - frame, wrange1, wrange2) - - - retval["METRICS"]={"SKYFIBERID": skyfiber.tolist(), "SKYCONT":skycont, "SKYCONT_FIBER":meancontfiber} - - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_sky_continuum(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Sky_Rband(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SKYRBAND" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "SKYRBAND" - status=kwargs['statKey'] if 'statKey' in kwargs else "SKYRBAND_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "SKYRBAND_WARN_RANGE" in parms and "SKYRBAND_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["SKYRBAND_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["SKYRBAND_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is found for this QA") - sys.exit("Update the configuration file for the parameters") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Sky_Rband': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('cframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - #- qa dictionary - retval={} - retval["NIGHT"] = frame.meta["NIGHT"] - retval["PANAME" ]= paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=program=fibmap[1].header['PROGRAM'] - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - retval["PARAMS"] = param - - #- Find sky fibers - objects=frame.fibermap['OBJTYPE'] - skyfibers=np.where(objects=="SKY")[0] - - flux=frame.flux - wave=frame.wave - #- Set appropriate filter and zero point - if camera[0].lower() == 'r': - responsefilter='decam2014-r' - - #- Get filter response information from speclite - try: - from importlib import resources - responsefile = resources.files('speclite').joinpath(f'data/filters/{responsefilter}.ecsv') - #- Grab wavelength and response information from file - rfile=np.genfromtxt(responsefile) - rfile=rfile[1:] # remove wavelength/response labels - rwave=np.zeros(rfile.shape[0]) - response=np.zeros(rfile.shape[0]) - for i in range(rfile.shape[0]): - rwave[i]=10.*rfile[i][0] # convert to angstroms - response[i]=rfile[i][1] - except: - log.critical("Could not find filter response file, can't compute spectral magnitudes") - - #- Convole flux with response information - res=np.zeros(frame.wave.shape) - for w in range(response.shape[0]): - if w >= 1 and w<= response.shape[0]-2: - ind=np.abs(frame.wave-rwave[w]).argmin() - lo=(rwave[w]-rwave[w-1])/2 - wlo=rwave[w]-lo - indlo=np.abs(frame.wave-wlo).argmin() - hi=(rwave[w+1]-rwave[w])/2 - whi=rwave[w]+hi - indhi=np.abs(frame.wave-whi).argmin() - res[indlo:indhi]=response[w] - skyrflux=res*flux[skyfibers] - - #- Calculate integrals for sky fibers - integrals=[] - for ii in range(len(skyrflux)): - integrals.append(qalib.integrate_spec(frame.wave,skyrflux[ii])) - integrals=np.array(integrals) - - #- Convert calibrated flux to fiber magnitude - specmags=np.zeros(integrals.shape) - specmags[integrals>0]=21.1-2.5*np.log10(integrals[integrals>0]/frame.meta["EXPTIME"]) - avg_skyrband=np.mean(specmags[specmags>0]) - - retval["METRICS"]={"SKYRBAND_FIB":specmags,"SKYRBAND":avg_skyrband} - - #- If not in r channel, set reference and metrics to zero - else: - retval["PARAMS"]["SKYRBAND_{}_REF".format(program.upper())]=[0.] - zerospec=np.zeros_like(skyfibers) - zerorband=0. 
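The r-band sky magnitude above is trapezoidal integration of the response-weighted flux followed by an ST-style zero point, SKYRBAND = 21.1 - 2.5*log10(integral/EXPTIME). A minimal sketch with toy inputs (the wavelength grid, flux level, and Gaussian stand-in response are all made up; only the zero point and EXPTIME normalization come from the code above):

    import numpy as np

    wave = np.linspace(5600., 7600., 2000)           # Angstroms, toy grid
    flux = np.full_like(wave, 1e-17)                 # ergs/s/cm**2/A, toy sky spectrum
    response = np.exp(-0.5*((wave-6600.)/400.)**2)   # stand-in r-band response curve

    integral = np.trapz(response*flux, wave)         # same role as qalib.integrate_spec
    exptime = 900.0
    if integral > 0:
        mag = 21.1 - 2.5*np.log10(integral/exptime)  # ST-style fiber magnitude
        print(mag)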
- retval["METRICS"]={"SKYRBAND_FIB":zerospec,"SKYRBAND":zerorband} - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - - return retval - - def get_default_config(self): - return {} - - -class Sky_Peaks(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="PEAKCOUNT" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "PEAKCOUNT" - status=kwargs['statKey'] if 'statKey' in kwargs else "PEAKCOUNT_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "PEAKCOUNT_WARN_RANGE" in parms and "PEAKCOUNT_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["PEAKCOUNT_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["PEAKCOUNT_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Sky_Peaks': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('fframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - from desispec.qa.qalib import sky_peaks - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - allpeaks=inputs["Peaks"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = frame.meta["NIGHT"] - - # Parameters - if param is None: - log.critical("No parameter is given for this QA! 
") - sys.exit("Check the configuration file") - - param['B_PEAKS']=allpeaks['B_PEAKS'] - param['R_PEAKS']=allpeaks['R_PEAKS'] - param['Z_PEAKS']=allpeaks['Z_PEAKS'] - - #nspec_counts, sky_counts, tgt_counts, tgt_counts_rms = sky_peaks(param, frame) - nspec_counts, sky_counts, skyfibers, nskyfib= sky_peaks(param, frame) - rms_nspec = np.std(nspec_counts)#qalib.getrms(nspec_counts) - rms_skyspec = np.std(sky_counts)#qalib.getrms(sky_counts) - - sumcount_med_sky=np.median(sky_counts) - - retval["PARAMS"] = param - - fiberid=frame.fibermap['FIBER'] - - retval["METRICS"]={"FIBERID":fiberid,"PEAKCOUNT":sumcount_med_sky,"PEAKCOUNT_NOISE":rms_skyspec,"PEAKCOUNT_FIB":nspec_counts,"SKYFIBERID":skyfibers, "NSKY_FIB":nskyfib}#,"PEAKCOUNT_TGT":tgt_counts,"PEAKCOUNT_TGT_NOISE":tgt_counts_rms} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_sky_peaks(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Sky_Residual(MonitoringAlg): - """ - Use offline sky_residual function to calculate sky residuals - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="RESIDUAL" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "RESIDNOISE" - status=kwargs['statKey'] if 'statKey' in kwargs else "RESID_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "RESID_WARN_RANGE" in parms and "RESID_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["RESID_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["RESID_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Sky_Residual': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('sframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - skymodel=args[1] - - return self.run_qa(frame,skymodel,inputs) - - def run_qa(self,frame,skymodel,inputs): - from desispec.sky import qa_skysub - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - if skymodel is None: - raise IOError("Must have skymodel to find residual. 
It can't be None") - #- return values - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = frame.meta["NIGHT"] - - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - retval["PARAMS"] = param - - qadict=qalib.sky_resid(param,frame,skymodel,quick_look=True) - - retval["METRICS"] = {} - for key in qadict.keys(): - retval["METRICS"][key] = qadict[key] - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_residuals(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - - -class Integrate_Spec(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="INTEG" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "DELTAMAG_TGT" - status=kwargs['statKey'] if 'statKey' in kwargs else "DELTAMAG_TGT_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "DELTAMAG_WARN_RANGE" in parms and "DELTAMAG_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["DELTAMAG_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["DELTAMAG_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Integrate_Spec': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('cframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - flux=frame.flux - ivar=frame.ivar - wave=frame.wave - - retval={} - retval["PANAME" ] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["NIGHT"] = frame.meta["NIGHT"] - retval["EXPID"] = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=program=fibmap[1].header['PROGRAM'] - - retval["NIGHT"] = frame.meta["NIGHT"] - - flux=frame.flux - wave=frame.wave - #- Grab magnitudes for appropriate filter - if camera[0].lower() == 'b': - band = 'G' - responsefilter='decam2014-g' - elif camera[0].lower() == 'r': - band = 'R' - responsefilter='decam2014-r' - elif camera[0].lower() == 'z': - band = 'Z' - responsefilter='decam2014-z' - else: - raise ValueError("Camera {} not in b, r, or z channels...".format(camera)) - - #- Find fibers per target type - elgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.ELG) != 0)[0] - lrgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.LRG) != 0)[0] - qsofibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.QSO) != 0)[0] - bgsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.BGS_ANY) != 0)[0] - mwsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.MWS_ANY) != 0)[0] - stdfibers = np.where(isStdStar(frame.fibermap))[0] - skyfibers = np.where(frame.fibermap['OBJTYPE'] == 'SKY')[0] - - #- Setup target fibers per program - if program == 'dark': - objfibers = [elgfibers,lrgfibers,qsofibers,stdfibers] - elif program == 'gray': - objfibers = [elgfibers,stdfibers] - elif program == 'bright': - objfibers = [bgsfibers,mwsfibers,stdfibers] - - magnitudes=np.zeros(frame.nspec) - key = 'FLUX_'+band - magnitudes = 22.5 - 2.5*np.log10(frame.fibermap[key]) - #- Set objects with zero flux to 30 mag - zeroflux = np.where(frame.fibermap[key]==0.)[0] - magnitudes[zeroflux] = 30. 
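The imaging magnitudes above assume fibermap fluxes in nanomaggies, so mag = 22.5 - 2.5*log10(flux), with a 30-mag sentinel for objects without flux. A minimal sketch of that conversion (toy values, and a slightly safer flux > 0 guard than the exact-zero test above):

    import numpy as np

    flux_nmgy = np.array([10.0, 1.0, 0.0, 100.0])   # toy FLUX_R values in nanomaggies
    mags = np.full(flux_nmgy.shape, 30.0)           # 30 mag sentinel for zero/negative flux
    ok = flux_nmgy > 0
    mags[ok] = 22.5 - 2.5*np.log10(flux_nmgy[ok])
    print(mags)                                     # [20.   22.5  30.   17.5]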
- - #- Get filter response information from speclite - try: - from importlib import resources - responsefile = resources.files('speclite').joinpath(f'data/filters/{responsefilter}.ecsv') - #- Grab wavelength and response information from file - rfile=np.genfromtxt(responsefile) - rfile=rfile[1:] # remove wavelength/response labels - rwave=np.zeros(rfile.shape[0]) - response=np.zeros(rfile.shape[0]) - for i in range(rfile.shape[0]): - rwave[i]=10.*rfile[i][0] # convert to angstroms - response[i]=rfile[i][1] - except: - log.critical("Could not find filter response file, can't compute spectral magnitudes") - - #- Convole flux with response information - res=np.zeros(frame.wave.shape) - for w in range(response.shape[0]): - if w >= 1 and w<= response.shape[0]-2: - ind=np.abs(frame.wave-rwave[w]).argmin() - lo=(rwave[w]-rwave[w-1])/2 - wlo=rwave[w]-lo - indlo=np.abs(frame.wave-wlo).argmin() - hi=(rwave[w+1]-rwave[w])/2 - whi=rwave[w]+hi - indhi=np.abs(frame.wave-whi).argmin() - res[indlo:indhi]=response[w] - rflux=res*flux - - #- Calculate integrals for all fibers - integrals=[] - for ii in range(len(rflux)): - integrals.append(qalib.integrate_spec(frame.wave,rflux[ii])) - integrals=np.array(integrals) - - #- Convert calibrated flux to spectral magnitude - specmags=np.zeros(integrals.shape) - specmags[integrals>0]=21.1-2.5*np.log10(integrals[integrals>0]/frame.meta["EXPTIME"]) - - #- Save number of negative flux fibers - negflux=np.where(specmags==0.)[0] - num_negflux=len(negflux) - - #- Set sky and negative flux fibers to 30 mag - specmags[skyfibers]=30. - specmags[negflux]=30. - - #- Calculate integrals for each target type - tgt_specmags=[] - for T in objfibers: - if num_negflux != 0: - T=np.array(list(set(T) - set(negflux))) - obj_integ=[] - for ii in range(len(rflux[T])): - obj_integ.append(qalib.integrate_spec(frame.wave,rflux[T][ii])) - obj_integ = np.array(obj_integ) - - #- Convert calibrated flux to spectral magnitude per terget type - #- Using ST magnitude system because frame flux is in units ergs/s/cm**2/A - obj_specmags = np.zeros(obj_integ.shape) - obj_specmags[obj_integ>0] = 21.1-2.5*np.log10(obj_integ[obj_integ>0]/frame.meta["EXPTIME"]) - tgt_specmags.append(obj_specmags) - - tgt_specmags = np.array(tgt_specmags) - - #- Fiber magnitudes per target type - tgt_mags=[] - for obj in objfibers: - if num_negflux != 0: - obj=np.array(list(set(obj) - set(negflux))) - tgt_mags.append(magnitudes[obj]) - - tgt_mags = np.array(tgt_mags) - - #- Calculate delta mag, remove sky/negative flux fibers first - remove_fib = np.array(list(set(skyfibers) | set(negflux))) - nosky_specmags = np.delete(specmags,remove_fib) - nosky_mags = np.delete(magnitudes,remove_fib) - deltamag = nosky_specmags - nosky_mags - - #- Calculate avg delta mag per target type - deltamag_tgt = tgt_specmags - tgt_mags - deltamag_tgt_avg=[] - for tgt in range(len(deltamag_tgt)): - deltamag_tgt_avg.append(np.mean(deltamag_tgt[tgt])) - - if param is None: - log.critical("No parameter is given for this QA! 
") - sys.exit("Check the configuration file") - - retval["PARAMS"] = param - - fiberid=frame.fibermap['FIBER'] - - #SE: should not have any nan or inf at this point but let's keep it for safety measures here - retval["METRICS"]={"FIBERID":fiberid,"NFIBNOTGT":num_negflux,"SPEC_MAGS":specmags, "DELTAMAG":np.nan_to_num(deltamag), "STD_FIBERID":stdfibers, "DELTAMAG_TGT":np.nan_to_num(deltamag_tgt_avg)} - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_integral(retval,qafig,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - -class Calculate_SNR(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SNR" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "FIDSNR_TGT" - status=kwargs['statKey'] if 'statKey' in kwargs else "FIDSNR_TGT_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "FIDSNR_TGT_WARN_RANGE" in parms and "FIDSNR_TGT_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["FIDSNR_TGT_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["FIDSNR_TGT_NORMAL_RANGE"]),QASeverity.NORMAL)]# sorted by most severe to least severe - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Calculate_SNR': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - frame = get_frame('sframe',night,expid,camera,kwargs["specdir"]) - else: frame=args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(frame,inputs) - - def run_qa(self,frame,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - if isinstance(frame,QFrame): - frame = frame.asframe() - - #- return values - retval={} - retval["PANAME"] = paname - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = expid = '{0:08d}'.format(frame.meta["EXPID"]) - retval["CAMERA"] = camera - retval["FLAVOR"] = frame.meta["FLAVOR"] - kwargs=self.config['kwargs'] - - if frame.meta["FLAVOR"] == 'science': - fibmap =fits.open(kwargs['FiberMap']) - retval["PROGRAM"]=program=fibmap[1].header['PROGRAM'] - - objlist=[] - if program == 'dark': - objlist = ['ELG','LRG','QSO','STAR'] - elif program == 'gray': - objlist = ['ELG','STAR'] - elif program == 'bright': - objlist = ['BGS','MWS','STAR'] - - retval["NIGHT"] = night = frame.meta["NIGHT"] - - ra = fibermap["TARGET_RA"] - dec = fibermap["TARGET_DEC"] - - #- select band for mag, using DECAM_R if present - if param is None: - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - - fidboundary=None - - qadict,fitsnr = qalib.orig_SNRFit(frame,night,camera,expid,param,fidboundary=fidboundary) - - #- Check for inf and nans in missing magnitudes for json support of QLF #TODO review this later - - for obj in range(len(qadict["SNR_MAG_TGT"])): - for mag in [qadict["SNR_MAG_TGT"][obj]]: - k=np.where(~np.isfinite(mag))[0] - if len(k) > 0: - log.warning("{} objects have no or unphysical magnitudes".format(len(k))) - mag=np.array(mag) - mag[k]=26. #- Putting 26, so as to make sure within reasonable range for plots. 
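Strict JSON has no representation for NaN or inf, hence the cleanup above before the metrics are serialized for QLF. A minimal sketch of the same masking (toy values):

    import numpy as np

    mag = np.array([21.3, np.nan, np.inf, 22.0])
    bad = ~np.isfinite(mag)
    if bad.any():
        print("{} objects have no or unphysical magnitudes".format(bad.sum()))
    mag[bad] = 26.0   # keep values inside a reasonable range for plots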
- retval["METRICS"] = qadict - retval["PARAMS"] = param - - rescut=param["RESIDUAL_CUT"] - sigmacut=param["SIGMA_CUT"] - - ############################################################### - # This section is for adding QA metrics for plotting purposes # - ############################################################### - - ############################################################### - -# if qafile is not None: -# outfile=qa.write_qa_ql(qafile,retval) -# log.debug("Output QA data is in {}".format(outfile)) - if qafig is not None: - fig.plot_SNR(retval,qafig,objlist,fitsnr,rescut,sigmacut,plotconf=plotconf,hardplots=hardplots) - log.debug("Output QA fig {}".format(qafig)) - - return retval - - def get_default_config(self): - return {} - -class Check_Resolution(MonitoringAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="CHECKARC" - kwargs=config['kwargs'] - parms=kwargs['param'] - key=kwargs['refKey'] if 'refKey' in kwargs else "CHECKARC" - status=kwargs['statKey'] if 'statKey' in kwargs else "CHECKARC_STATUS" - kwargs["RESULTKEY"]=key - kwargs["QASTATUSKEY"]=status - - if "ReferenceMetrics" in kwargs: - r=kwargs["ReferenceMetrics"] - if key in r: - kwargs["REFERENCE"]=r[key] - - if "CHECKARC_WARN_RANGE" in parms and "CHECKARC_NORMAL_RANGE" in parms: - kwargs["RANGES"]=[(np.asarray(parms["CHECKARC_WARN_RANGE"]),QASeverity.WARNING), - (np.asarray(parms["CHECKARC_NORMAL_RANGE"]),QASeverity.NORMAL)] - - MonitoringAlg.__init__(self,name,fr,config,logger) - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("No parameter is given for this QA! ") - sys.exit("Check the configuration file") - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - if kwargs["singleqa"] == 'Check_Resolution': - night = kwargs['night'] - expid = '{:08d}'.format(kwargs['expid']) - camera = kwargs['camera'] - #- Finding psf file for QA - #file_psf = get_psf('psf',night,expid,camera,kwargs["specdir"]) - else: file_psf = args[0] - inputs=get_inputs(*args,**kwargs) - - return self.run_qa(file_psf,inputs) - - def run_qa(self,file_psf,inputs): - camera=inputs["camera"] - paname=inputs["paname"] - fibermap=inputs["fibermap"] - amps=inputs["amps"] - qafile=inputs["qafile"] - qafig=inputs["qafig"] - param=inputs["param"] - refmetrics=inputs["refmetrics"] - plotconf=inputs["plotconf"] - hardplots=inputs["hardplots"] - - retval={} - retval['PANAME'] = paname - kwargs=self.config['kwargs'] - retval["QATIME"] = datetime.datetime.now().isoformat() - retval["EXPID"] = '{:08d}'.format(kwargs['expid']) - retval["CAMERA"] = camera - retval["PROGRAM"] = 'ARC' - retval["FLAVOR"] = 'arc' - retval["NIGHT"] = kwargs['night'] - - - # file_psf.ycoeff is not the wsigma_array. - # FIX later.TEST QA with file_psf.ycoeff - - wsigma_array = file_psf.ysig_vs_wave_traceset._coeff - p0 = wsigma_array[0:, 0:1] - p1 = wsigma_array[0:, 1:2] - p2 = wsigma_array[0:, 2:3] - - #- Save array of ones and zeros for good/no fits - nfib = len(p0) - nofit = np.where(p0 == 0.)[0] - allfibs=np.ones(nfib) - allfibs[nofit] = 0. 
-        #- Total number of fibers fit used as scalar metric
-        ngoodfits = len(np.where(allfibs == 1.)[0])
-
-        # Medians of Legendre Coeffs to be used as 'Model'
-        medlegpolcoef = np.median(wsigma_array,axis = 0)
-
-        wsigma_rms = np.sqrt(np.mean((wsigma_array - medlegpolcoef)**2,axis = 0))
-
-        # Check how many of each parameter are outside of +- 2 RMS of the median.
-        toperror = np.array([medlegpolcoef[val] + 2*wsigma_rms[val] for val in [0,1,2]])
-        bottomerror = np.array([medlegpolcoef[val] - 2*wsigma_rms[val] for val in [0,1,2]])
-
-        badparamrnum0 = list(np.where(np.logical_or(p0 > toperror[0], p0 < bottomerror[0]))[0])
-        badparamrnum1 = list(np.where(np.logical_or(p1 > toperror[1], p1 < bottomerror[1]))[0])
-        badparamrnum2 = list(np.where(np.logical_or(p2 > toperror[2], p2 < bottomerror[2]))[0])

-        for ispec in range(frame.flux.shape[0]):
-            if np.all(psf.x(ispec) > ampboundary[1].start):
-                startspec=ispec
-                #- cutting off wavelength boundaries from startspec
-                yy=psf.y(ispec,frame.wave)
-                k=np.where(yy > ampboundary[0].start)[0]
-                startwave0=k[0]
-                yy=psf.y(ispec,frame.wave)
-                k=np.where(yy < ampboundary[0].stop)[0]
-                endwave0=k[-1]
-                break
-        else:
-            startspec=None
-            startwave0=None
-            endwave0=None
-        if startspec is not None:
-            for ispec in range(frame.flux.shape[0])[::-1]:
-                if np.all(psf.x(ispec) < ampboundary[1].stop):
-                    endspec=ispec
-                    #- cutting off wavelength boundaries from startspec
-                    yy=psf.y(ispec,frame.wave)
-                    k=np.where(yy > ampboundary[0].start)[0]
-                    startwave1=k[0]
-                    yy=psf.y(ispec,frame.wave)
-                    k=np.where(yy < ampboundary[0].stop)[0]
-                    endwave1=k[-1]
-                    break
-            else:
-                endspec=None
-                startwave1=None
-                endwave1=None
-        if startwave0 is not None and startwave1 is not None:
-            startwave=max(startwave0,startwave1)
-        else: startwave = None
-        if endwave0 is not None and endwave1 is not None:
-            endwave=min(endwave0,endwave1)
-        else: endwave = None
-        if endspec is not None:
-            #endspec+=1 #- last entry exclusive in slice, so add 1
-            #endwave+=1
-
-            if endspec < leftmax:
-                leftmax=endspec
-            if startspec > rightmin:
-                rightmin=startspec
-            if endwave < bottommax:
-                bottommax=endwave
-            if startwave > topmin:
-                topmin=startwave
-        else:
-            rightmin=0 #- Only if no spec in right side of CCD. Passing 0 to ensure a valid data type; NoneType throws a type error in yaml.dump.
-
-        #fiducialb=(slice(startspec,endspec,None),slice(startwave,endwave,None)) #- Note: y,x --> spec, wavelength
-        #fidboundary.append(fiducialb)
-
-    #- return pixboundary,fidboundary
-    return leftmax,rightmin,bottommax,topmin
-
-def slice_fidboundary(frame,leftmax,rightmin,bottommax,topmin):
-    """
-    leftmax,rightmin,bottommax,topmin - Indices in spec-wavelength space for different amps (e.g. output from the fiducialregion function)
-    #- This could be merged into the fiducialregion function
-
-    Returns (list):
-        list of tuples of slices for the spec-wavelength boundary of each amp.
-    """
-    leftmax+=1 #- last entry not counted in slice
-    bottommax+=1
-    if rightmin ==0:
-        return [(slice(0,leftmax,None),slice(0,bottommax,None)), (slice(None,None,None),slice(None,None,None)),
-                (slice(0,leftmax,None),slice(topmin,frame.wave.shape[0],None)),(slice(None,None,None),slice(None,None,None))]
-    else:
-        return [(slice(0,leftmax,None),slice(0,bottommax,None)), (slice(rightmin,frame.nspec,None),slice(0,bottommax,None)),
-                (slice(0,leftmax,None),slice(topmin,frame.wave.shape[0],None)),(slice(rightmin,frame.nspec,None),slice(topmin,frame.wave.shape[0]-1,None))]
-
-
-def getrms(image):
-    """
-    Calculate the rms of the pixel values
-
-    Args:
-        image: 2d array
-    """
-    pixdata=image.ravel()
-    rms=np.std(pixdata)
-    return rms
-
-
-def countpix(image,nsig=None):
-    """
-    Count the pixels above a given threshold in units of sigma.
-
-    Args:
-        image: 2d image array
-        nsig: threshold in units of sigma, e.g 2 for 2 sigma
-    """
-    sig=np.std(image.ravel())
-    counts_nsig=np.where(image.ravel() > nsig*sig)[0].shape[0]
-    return counts_nsig
-
-def countbins(flux,threshold=0):
-    """
-    Count the number of bins above a given threshold on each fiber
-
-    Args:
-        flux: 2d (nspec,nwave)
-        threshold: threshold counts
-    """
-    counts=np.zeros(flux.shape[0])
-    for ii in range(flux.shape[0]):
-        ok=np.where(flux[ii]> threshold)[0]
-        counts[ii]=ok.shape[0]
-    return counts
-
-def continuum(wave,flux,wmin=None,wmax=None):
-    """
-    Find the median continuum of the spectrum inside a wavelength region.
-
-    Args:
-        wave: 1d wavelength array
-        flux: 1d counts/flux array
-        wmin and wmax: region to consider for the continuum
-    """
-    if wmin is None:
-        wmin=min(wave)
-    if wmax is None:
-        wmax=max(wave)
-
-    kk=np.where((wave>wmin) & (wave < wmax))
-    newwave=wave[kk]
-    newflux=flux[kk]
-    #- find the median continuum
-    medcont=np.median(newflux)
-    return medcont
-
-def integrate_spec(wave,flux):
-    """
-    Calculate the integral of the spectrum in the given range using trapezoidal integration
-
-    Note: limits of integration are min and max values of wavelength
-
-    Args:
-        wave: 1d wavelength array
-        flux: 1d flux array
-    """
-    integral=np.trapz(flux,wave)
-    return integral
-
-
-def sky_continuum(frame, wrange1, wrange2):
-    """
-    QA Algorithm for sky continuum.
-
-    To be called from desispec.sky.qa_skysub and
-    desispec.qa.qa_quicklook.Sky_Continuum.run_qa
-
-    Args:
-        frame:
-        wrange1:
-        wrange2:
-
-    Returns:
-        skyfiber, contfiberlow, contfiberhigh, meancontfiber, skycont
-
-    """
-    #- get the skyfibers first
-    skyfiber=np.where(frame.fibermap['OBJTYPE']=='SKY')[0]
-    nspec_sky=skyfiber.shape[0]
-    if isinstance(wrange1,list): # Offline list format
-        wminlow,wmaxlow=wrange1
-        wminhigh,wmaxhigh=wrange2
-    else: # Quick look string format
-        wminlow,wmaxlow=[float(w) for w in wrange1.split(',')]
-        wminhigh,wmaxhigh=[float(w) for w in wrange2.split(',')]
-    selectlow=np.where((frame.wave>wminlow) & (frame.wave<wmaxlow))[0]
-    selecthigh=np.where((frame.wave>wminhigh) & (frame.wave < wmaxhigh))[0]
-
-    contfiberlow=[]
-    contfiberhigh=[]
-    meancontfiber=[]
-    for ii in skyfiber:
-        contlow=continuum(frame.wave[selectlow],frame.flux[ii,selectlow])
-        conthigh=continuum(frame.wave[selecthigh],frame.flux[ii,selecthigh])
-        contfiberlow.append(contlow)
-        contfiberhigh.append(conthigh)
-        meancontfiber.append(np.mean((contlow,conthigh)))
-    skycont=np.mean(meancontfiber) #- over the entire CCD (skyfibers)
-
-    # Return
-    return skyfiber, contfiberlow, contfiberhigh, meancontfiber, skycont
-
-
-def sky_peaks(param, frame, dw=2, amps=False):
-
-    # define sky peaks and wavelength region around peak flux to be integrated
-    camera = frame.meta['CAMERA']
-    peaks=np.array(param['{:s}_PEAKS'.format(camera[0].upper())])
-
-    nspec_counts=[]
-    sky_counts=[]
-    skyfibers = []
-    nspec_counts_rms=[]
-    amp1=[]
-    amp2=[]
-    amp3=[]
-    amp4=[]
-    rmsamp1=[]
-    rmsamp2=[]
-    rmsamp3=[]
-    rmsamp4=[]
-    for i in range(frame.flux.shape[0]):
-        peak_fluxes = []
-        for peak in peaks:
-            iwave = np.argmin(np.abs(frame.wave-peak))
-            peak_fluxes.append(np.trapz(frame.flux[i,iwave-dw:iwave+dw+1]))
-
-        # Sum
-        sum_counts=np.sum(peak_fluxes)/frame.meta["EXPTIME"]
-        sum_counts_rms=np.sum(peak_fluxes)/np.sqrt(frame.meta["EXPTIME"]) # This looks funny to me..
-        nspec_counts.append(sum_counts)
-        nspec_counts_rms.append(sum_counts_rms)
-
-        # Sky?
- if frame.fibermap['OBJTYPE'][i]=='SKY': - - skyfibers.append(i) - sky_counts.append(sum_counts) - ''' - if amps: - if frame.fibermap['FIBER'][i]<240: - if camera[0]=="b": - amp1_flux=peak1_flux/frame.meta["EXPTIME"] - amp3_flux=np.sum((peak2_flux+peak3_flux)/frame.meta["EXPTIME"]) - rmsamp1_flux=peak1_flux/np.sqrt(frame.meta["EXPTIME"]) - rmsamp3_flux=np.sum((peak2_flux+peak3_flux)/np.sqrt(frame.meta["EXPTIME"])) - if camera[0]=="r": - amp1_flux=np.sum((peak1_flux+peak2_flux)/frame.meta["EXPTIME"]) - amp3_flux=np.sum((peak3_flux+peak4_flux+peak5_flux)/frame.meta["EXPTIME"]) - rmsamp1_flux=np.sum((peak1_flux+peak2_flux)/np.sqrt(frame.meta["EXPTIME"])) - rmsamp3_flux=np.sum((peak3_flux+peak4_flux+peak5_flux)/np.sqrt(frame.meta["EXPTIME"])) - if camera[0]=="z": - amp1_flux=np.sum((peak1_flux+peak2_flux+peak3_flux)/frame.meta["EXPTIME"]) - amp3_flux=np.sum((peak4_flux+peak5_flux+peak6_flux)/frame.meta["EXPTIME"]) - rmsamp1_flux=np.sum((peak1_flux+peak2_flux+peak3_flux)/np.sqrt(frame.meta["EXPTIME"])) - rmsamp3_flux=np.sum((peak4_flux+peak5_flux+peak6_flux)/np.sqrt(frame.meta["EXPTIME"])) - amp1.append(amp1_flux) - amp3.append(amp3_flux) - rmsamp1.append(rmsamp1_flux) - rmsamp3.append(rmsamp3_flux) - if frame.fibermap['FIBER'][i]>260: - if camera[0]=="b": - amp2_flux=peak1_flux/frame.meta["EXPTIME"] - amp4_flux=np.sum((peak2_flux+peak3_flux)/frame.meta["EXPTIME"]) - rmsamp2_flux=peak1_flux/np.sqrt(frame.meta["EXPTIME"]) - rmsamp4_flux=np.sum((peak2_flux+peak3_flux)/np.sqrt(frame.meta["EXPTIME"])) - if camera[0]=="r": - amp2_flux=np.sum((peak1_flux+peak2_flux)/frame.meta["EXPTIME"]) - amp4_flux=np.sum((peak3_flux+peak4_flux+peak5_flux)/frame.meta["EXPTIME"]) - rmsamp2_flux=np.sum((peak1_flux+peak2_flux)/np.sqrt(frame.meta["EXPTIME"])) - rmsamp4_flux=np.sum((peak3_flux+peak4_flux+peak5_flux)/np.sqrt(frame.meta["EXPTIME"])) - if camera[0]=="z": - amp2_flux=np.sum((peak1_flux+peak2_flux+peak3_flux)/frame.meta["EXPTIME"]) - amp4_flux=np.sum((peak4_flux+peak5_flux+peak6_flux)/frame.meta["EXPTIME"]) - rmsamp2_flux=np.sum((peak1_flux+peak2_flux+peak3_flux)/np.sqrt(frame.meta["EXPTIME"])) - rmsamp4_flux=np.sum((peak4_flux+peak5_flux+peak6_flux)/np.sqrt(frame.meta["EXPTIME"])) - amp2.append(amp2_flux) - amp4.append(amp4_flux) - rmsamp2.append(rmsamp2_flux) - rmsamp4.append(rmsamp4_flux) - ''' - nskyfib=len(skyfibers) - nspec_counts = np.array(nspec_counts) - sky_counts = np.array(sky_counts) - # Return - return nspec_counts, sky_counts, skyfibers, nskyfib - - -def sky_resid(param, frame, skymodel, quick_look=False): - """ QA Algorithm for sky residual - To be called from desispec.sky.qa_skysub and desispec.qa.qa_quicklook.Sky_residual.run_qa - Args: - param : dict of QA parameters - frame : desispec.Frame object after sky subtraction - skymodel : desispec.SkyModel object - Returns a qa dictionary for sky resid - """ - # Output dict - qadict = {} - - if quick_look: - qadict['RA'] = frame.fibermap['TARGET_RA'] - qadict['DEC'] = frame.fibermap['TARGET_DEC'] - - # Grab sky fibers on this frame - skyfibers = np.where(frame.fibermap['OBJTYPE'] == 'SKY')[0] - assert np.max(skyfibers) < 500 #- indices, not fiber numbers - nfibers=len(skyfibers) - qadict['NSKY_FIB'] = int(nfibers) - - - #- Residuals - res=frame.flux[skyfibers] #- as this frame is already sky subtracted - res_ivar=frame.ivar[skyfibers] - - # Chi^2 and Probability - chi2_fiber = np.sum(res_ivar*(res**2),1) - dof = np.sum(res_ivar > 0., axis=1) - chi2_prob = scipy.stats.distributions.chi2.sf(chi2_fiber, dof) - - # Bad models - qadict['NBAD_PCHI'] 
= int(np.sum(chi2_prob < param['PCHI_RESID'])) - if qadict['NBAD_PCHI'] > 0: - log.warning("Bad Sky Subtraction in {:d} fibers".format( - qadict['NBAD_PCHI'])) - - # Median residual - qadict['RESID'] = float(np.median(res)) # Median residual (counts) - log.info("Median residual for sky fibers = {:g}".format( - qadict['RESID'])) - - # Residual percentiles - perc = dustat.perc(res, per=param['PER_RESID']) - qadict['RESID_PER'] = [float(iperc) for iperc in perc] - - qadict["SKYFIBERID"]=skyfibers.tolist() - #- Residuals in wave and fiber axes - if quick_look: - qadict["MED_RESID_WAVE"]=np.median(res,axis=0) - qadict["MED_RESID_FIBER"]=np.median(res,axis=1) - #- Weighted average for each bin on all fibers - qadict["WAVG_RES_WAVE"]=np.zeros(res.shape[1]) - sw=np.sum(res_ivar,axis=0) - qadict["WAVG_RES_WAVE"][sw>0] = np.sum(res*res_ivar,axis=0)[sw>0] / sw[sw>0] - - #- Histograms for residual/sigma #- inherited from qa_plots.frame_skyres() - if quick_look: - binsz = param['BIN_SZ'] - gd_res = res_ivar > 0. - devs = res[gd_res] * np.sqrt(res_ivar[gd_res]) - i0, i1 = int( np.min(devs) / binsz) - 1, int( np.max(devs) / binsz) + 1 - rng = tuple( binsz*np.array([i0,i1]) ) - nbin = i1-i0 - hist, edges = np.histogram(devs, range=rng, bins=nbin) - - #SE: commented this because didn't seem to be needed to be saved in the dictionary - #qadict['DEVS_1D'] = hist.tolist() #- histograms for deviates - #qadict['DEVS_EDGES'] = edges.tolist() #- Bin edges - - #- Add additional metrics for quicklook - if quick_look: - qadict["WAVELENGTH"]=frame.wave - # Return - return qadict - - -def SN_ratio(flux,ivar): - """ - SN Ratio - median snr for the spectra, flux should be sky subtracted. - - Args: - flux (array): 2d [nspec,nwave] the signal (typically for spectra, - this comes from frame object - ivar (array): 2d [nspec,nwave] corresponding inverse variance - - Returns: - medsnr (array): 1d [nspec] - """ - #- we calculate median and total S/N assuming no correlation bin by bin - snr = flux * np.sqrt(ivar) - medsnr = np.median(snr, axis=1) - return medsnr #, totsnr - - -def _get_mags(frame): - '''Extract frame.fibermap fluxes into mags depending upon camera - - Args: - frame: Frame object - - Returns array of magnitudes, using 99.0 when flux<0 - - b camera frames return g-band magnitudes; - r camera -> r-mags; z camera -> z-mags - ''' - camera = frame.meta['CAMERA'].lower() - if camera.startswith('b'): - flux = frame.fibermap['FLUX_G'] - elif camera.startswith('r'): - flux = frame.fibermap['FLUX_R'] - elif camera.startswith('z'): - flux = frame.fibermap['FLUX_Z'] - else: - raise ValueError('camera {} should start with b,r,z'.format(camera)) - - mags = np.zeros(len(flux)) + 99.0 #- use 99 for bad mags - ii = flux>0 - mags[ii] = 22.5 - 2.5*np.log10(flux[ii]) - - return mags - -def SignalVsNoise(frame,params,fidboundary=None): - """ - Signal vs. Noise - - Take flux and inverse variance arrays and calculate S/N for individual - targets (ELG, LRG, QSO, STD) and for each amplifier of the camera. - - Args: - flux (array): 2d [nspec,nwave] the signal (typically for spectra, - this comes from frame object - ivar (array): 2d [nspec,nwave] corresponding inverse variance - fidboundary : list of slices indicating where to select in fiber - and wavelength directions for each amp (output of slice_fidboundary function) - """ - mags = _get_mags(frame) - - medsnr=SN_ratio(frame.flux,frame.ivar) - - #- Calculate median SNR per bin and associate with imaging Mag. 
for ELG fibers - elgfibers=np.where((frame.fibermap['DESI_TARGET'] & desi_mask.ELG) != 0)[0] - elg_medsnr=medsnr[elgfibers] - elg_mag=mags[elgfibers] - elg_snr_mag=np.array((elg_medsnr,elg_mag)) #- not storing fiber number - - #- Calculate median SNR, associate with imaging Mag for LRGs - lrgfibers=np.where((frame.fibermap['DESI_TARGET'] & desi_mask.LRG) != 0)[0] - lrg_medsnr=medsnr[lrgfibers] - lrg_mag=mags[lrgfibers] - lrg_snr_mag=np.array((lrg_medsnr,lrg_mag)) - - #- Calculate median SNR, associate with imaging Mag. for QSOs - qsofibers=np.where((frame.fibermap['DESI_TARGET'] & desi_mask.QSO) != 0)[0] - qso_medsnr=medsnr[qsofibers] - qso_mag=mags[qsofibers] - qso_snr_mag=np.array((qso_medsnr,qso_mag)) - - #- Calculate median SNR, associate with Mag. for STD stars - stdfibers=np.where(isStdStar(frame.fibermap))[0] - std_medsnr=medsnr[stdfibers] - std_mag=mags[stdfibers] - std_snr_mag=np.array((std_medsnr,std_mag)) - - #- Median S/N for different amp zones. - average_amp = None - if fidboundary is not None: - averages=[] - for ii in range(4): - if fidboundary[ii][0].start is not None: #- have fibers in this amp? - medsnramp=SN_ratio(frame.flux[fidboundary[ii]],frame.ivar[fidboundary[ii]]) - averages.append(np.mean(medsnramp)) - else: - averages.append(None) - - average_amp=np.array(averages) - - elg_fidmag_snr = [] - star_fidmag_snr = [] - - ra = frame.fibermap['TARGET_RA'] - dec = frame.fibermap['TARGET_DEC'] - - #- fill QA dict with metrics: - qadict={ - "RA":ra, "DEC":dec, - "MEDIAN_SNR":medsnr, - "MEDIAN_AMP_SNR":average_amp, - "ELG_FIBERID":elgfibers.tolist(), - "ELG_SNR_MAG": elg_snr_mag, - "LRG_FIBERID":lrgfibers.tolist(), - "LRG_SNR_MAG": lrg_snr_mag, - "QSO_FIBERID": qsofibers.tolist(), - "QSO_SNR_MAG": qso_snr_mag, - "STAR_FIBERID": stdfibers.tolist(), - "STAR_SNR_MAG":std_snr_mag, - "ELG_FIDMAG_SNR":elg_fidmag_snr, - "STAR_FIDMAG_SNR":star_fidmag_snr - } - - return qadict - -def s2n_funcs(exptime=None): - """ - Functions for fitting S/N - - Args: - exptime: float, optional - - Returns: - funcMap: dict - - """ - funcMap={"linear":lambda x,a,b:a+b*x, - "poly":lambda x,a,b,c:a+b*x+c*x**2, - "astro":lambda x,a,b:(exptime*a*x)/np.sqrt(exptime*(a*x+b)) - } - return funcMap - -def s2n_flux_astro(flux, A, B): - """ - Function for a normalized (by texp**1/2) curve to flux vs S/N - - Args: - flux (float or np.ndarray): - Flux value(s) - A (float): - Scale coefficient - B (float): - Offset coefficient - - Returns: - S/N at the input flux - - """ - return flux*A/np.sqrt(A*flux + B) - - -def s2nfit(frame, camera, params): - """ - Signal vs. Noise With fitting - - Take flux and inverse variance arrays and calculate S/N for individual - targets (ELG, LRG, QSO, STD) and for each amplifier of the camera. - then fit snr=A*mag/sqrt(A*mag+B) - - see http://arXiv.org/abs/0706.1062v2 for proper fitting of power-law distributions - it is not implemented here! - - Instead we use scipy.optimize.curve_fit - - Args: - frame: desispec.Frame object - camera: str, name of the camera - params: parameters dictionary for S/N - - Returns: - qadict : dict - MEDIAN_SNR (ndarray, nfiber): Median S/N of light in each fiber - FIT_FILTER (str): Filter used for the fluxes - EXPTIME (float): Exposure time - XXX_FIBERID (list): Fibers matching ELG, LRG, BGS, etc. - SNR_MAG_TGT (list): List of lists with S/N and mag of ELG, LRG, BGS, etc. - FITCOEFF_TGT (list): List of fitted coefficients. 
Junk fits have np.nan - OBJLIST (list): List of object types analyzed (1 or more fiber) - """ - # Median snr - snr = frame.flux * np.sqrt(frame.ivar) - mediansnr = np.median(snr, axis=1) - qadict = {"MEDIAN_SNR": mediansnr} - exptime = frame.meta["EXPTIME"] - - # Parse filters - if "Filter" in params: - thisfilter = params["Filter"] - elif camera[0] == 'b': - thisfilter = 'DECAM_G' - elif camera[0] == 'r': - thisfilter = 'DECAM_R' - else: - thisfilter = 'DECAM_Z' - - qadict["FIT_FILTER"] = thisfilter - qadict["EXPTIME"] = exptime - - if thisfilter in ('DECAM_G', 'BASS_G'): - photflux = frame.fibermap['FLUX_G'] - elif thisfilter in ('DECAM_R', 'BASS_R'): - photflux = frame.fibermap['FLUX_R'] - elif thisfilter in ('DECAM_Z', 'MZLS_Z'): - photflux = frame.fibermap['FLUX_Z'] - else: - raise ValueError('Unknown filter {}'.format(thisfilter)) - - # - Loop over each target type, and associate SNR and image magnitudes for each type. - fitcoeff = [] - snrmag = [] - fitsnr = [] - fitT = [] - elgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.ELG) != 0)[0] - lrgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.LRG) != 0)[0] - qsofibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.QSO) != 0)[0] - bgsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.BGS_ANY) != 0)[0] - mwsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.MWS_ANY) != 0)[0] - stdfibers = np.where(isStdStar(frame.fibermap))[0] - - for T, fibers in ( - ['ELG', elgfibers], - ['LRG', lrgfibers], - ['QSO', qsofibers], - ['BGS', bgsfibers], - ['MWS', mwsfibers], - ['STAR', stdfibers], - ): - if len(fibers) == 0: - continue - - # S/N of the fibers - medsnr = mediansnr[fibers] - mags = np.zeros(medsnr.shape) - fit_these = photflux[fibers] > 0 - mags[fit_these] = 22.5 - 2.5 * np.log10(photflux[fibers][fit_these]) - - # Fit - try: - popt, pcov = optimize.curve_fit(s2n_flux_astro, photflux[fibers][fit_these].data, - medsnr[fit_these]/exptime**(1/2), p0=(0.02, 1.)) - except RuntimeError: - fitcoeff.append([np.nan, np.nan]) - else: - fitcoeff.append([popt[0], popt[1]]) - # Save - fitT.append(T) - - qadict["{:s}_FIBERID".format(T)] = fibers.tolist() - snr_mag = [medsnr.tolist(), mags.tolist()] - snrmag.append(snr_mag) - - # Save - qadict["SNR_MAG_TGT"] = snrmag - qadict["FITCOEFF_TGT"] = fitcoeff - qadict["OBJLIST"] = fitT - # Return - return qadict, fitsnr - - -def orig_SNRFit(frame,night,camera,expid,params,fidboundary=None, - offline=False): - """ - Signal vs. Noise With fitting - - Take flux and inverse variance arrays and calculate S/N for individual - targets (ELG, LRG, QSO, STD) and for each amplifier of the camera. - then fit the log(snr)=a+b*mag or log(snr)=poly(mag) - - see http://arXiv.org/abs/0706.1062v2 for proper fitting of power-law distributions - it is not implemented here! 
- - qadict has the following data model - "MAGNITUDES" : ndarray - Depends on camera (DECAM_G, DECAM_R, DECAM_Z) - "MEDIAN_SNR" : ndarray (nfiber) - "NUM_NEGATIVE_SNR" : int - "SNR_MAG_TGT" - "FITCOEFF_TGT" : list - "SNR_RESID" : list, can be trimmed down during the fitting - "FIDSNR_TGT" - "RA" : ndarray (nfiber) - "DEC" : ndarray (nfiber) - "OBJLIST" : list - Save a copy to make sense of the list order later - "EXPTIME" : float - "FIT_FILTER" : str - "r2" : float - Fitting parameter - - Args: - frame: desispec.Frame object - night : - camera : - expid : int - params: parameters dictionary - { - "Func": "linear", # Fit function type one of ["linear","poly","astro"] - "FIDMAG": 22.0, # magnitude to evaluate the fit - "Filter":"DECAM_R", #filter name - } - - fidboundary : list of slices indicating where to select in fiber - and wavelength directions for each amp (output of slice_fidboundary function) - offline: bool, optional - If True, save things differently for offline - - Returns: - qadict : dict - """ - print("Starting SNR Fit") - - #- Get imaging magnitudes and calculate SNR - fmag=22.0 - if "FIDMAG" in params: - fmag=params["FIDMAG"] - mediansnr=SN_ratio(frame.flux,frame.ivar) - qadict={"MEDIAN_SNR":mediansnr} - exptime=frame.meta["EXPTIME"] - ivar=frame.ivar - - if "Filter" in params: - thisfilter=params["Filter"] - elif camera[0] == 'b': - thisfilter='DECAM_G' - elif camera[0] =='r': - thisfilter='DECAM_R' - else: - thisfilter='DECAM_Z' - - qadict["FIT_FILTER"] = thisfilter - qadict["EXPTIME"] = exptime - - if thisfilter in ('DECAM_G', 'BASS_G'): - photflux = frame.fibermap['FLUX_G'] - elif thisfilter in ('DECAM_R', 'BASS_R'): - photflux = frame.fibermap['FLUX_R'] - elif thisfilter in ('DECAM_Z', 'MZLS_Z'): - photflux = frame.fibermap['FLUX_Z'] - else: - raise ValueError('Unknown filter {}'.format(thisfilter)) - - mag_grz = np.zeros((3, frame.nspec)) + 99.0 - for i, colname in enumerate(['FLUX_G', 'FLUX_R', 'FLUX_Z']): - ok = frame.fibermap[colname] > 0 - mag_grz[i, ok] = 22.5 - 2.5 * np.log10(frame.fibermap[colname][ok]) - - qadict["FILTERS"] = ['G', 'R', 'Z'] - - #qadict["OBJLIST"]=list(objlist) - - #- Set up fit of SNR vs. Magnitude - -# RS: commenting this until we have flux calibration -# try: -# #- Get read noise from Get_RMS TODO: use header information for this -# rfile=findfile('ql_getrms_file',int(night),int(expid),camera,specprod_dir=os.environ['QL_SPEC_REDUX']) -# with open(rfile) as rf: -# rmsfile=yaml.safe_load(rf) -# rmsval=rmsfile["METRICS"]["NOISE"] -# #- The factor of 1e-3 is a very basic (and temporary!!) flux calibration -# #- used to convert read noise to proper flux units -# r2=1e-3*rmsval**2 -# except: -# log.info("Was not able to obtain read noise from prior knowledge, fitting B+R**2...") - - # Use astronomically motivated function for SNR fit - funcMap = s2n_funcs(exptime=exptime) - fit = funcMap['astro'] - - # Use median inverse variance of each fiber for chi2 minimization - var=[] - for i in range(len(ivar)): - var.append(1/np.median(ivar[i])) - - neg_snr_tot=[] - #- neg_snr_tot counts the number of times a fiber has a negative median SNR. This should - #- not happen for non-sky fibers with actual flux in them. However, it does happen rarely - #- in sims. To avoid this, we omit such fibers in the fit, but keep count for diagnostic - #- purposes. - - #- Loop over each target type, and associate SNR and image magnitudes for each type. 
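The magnitude conversion used throughout these routines is the Legacy Surveys nanomaggy convention, m_AB = 22.5 - 2.5*log10(flux), with non-positive fluxes masked to a sentinel value, as in the mag_grz block above. A sketch of that guard as a standalone helper (flux_to_abmag is an illustrative name, not a desispec function):

import numpy as np

def flux_to_abmag(photflux, sentinel=99.0):
    # 22.5 - 2.5*log10(flux) converts nanomaggies to AB magnitudes;
    # fibers with flux <= 0 get a sentinel instead of inf/NaN.
    photflux = np.atleast_1d(np.asarray(photflux, dtype=float))
    mags = np.full(photflux.shape, sentinel)
    ok = photflux > 0
    mags[ok] = 22.5 - 2.5 * np.log10(photflux[ok])
    return mags

print(flux_to_abmag([100.0, 0.0, -5.0]))   # [17.5 99. 99.]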
- resid_snr=[] - fidsnr_tgt=[] - fitcoeff=[] - fitcovar=[] - snrmag=[] - fitsnr=[] - fitT = [] - elgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.ELG) != 0)[0] - lrgfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.LRG) != 0)[0] - qsofibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.QSO) != 0)[0] - bgsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.BGS_ANY) != 0)[0] - mwsfibers = np.where((frame.fibermap['DESI_TARGET'] & desi_mask.MWS_ANY) != 0)[0] - stdfibers = np.where(isStdStar(frame.fibermap))[0] - - for T, fibers in ( - ['ELG', elgfibers], - ['LRG', lrgfibers], - ['QSO', qsofibers], - ['BGS', bgsfibers], - ['MWS', mwsfibers], - ['STAR', stdfibers], - ): - if len(fibers) == 0: - continue - - # S/N - objvar = np.array(var)[fibers] - medsnr = mediansnr[fibers] - all_medsnr = medsnr.copy() # In case any are cut below - mags = np.zeros(medsnr.shape) - ok = (photflux[fibers] > 0) - mags[ok] = 22.5 - 2.5 * np.log10(photflux[fibers][ok]) - - try: - #- Determine negative SNR and mag values and remove - neg_snr=len(np.where(medsnr<=0.0)[0]) - neg_snr_tot.append(neg_snr) - xs=mags.argsort() - #- Convert magnitudes to flux - x=10**(-0.4*(mags[xs]-22.5)) - med_snr=medsnr[xs] - y=med_snr - #- Fit SNR vs. Magnitude using chi squared minimization, - #- evaluate at fiducial magnitude, and store results in METRICS - #- Set an initially high minimum chi2 value to be overwritten when fitting - minchi2=1e10 - for a in range(100): - for b in range(100): - guess=[0.01*a,0.1*b] - fitdata=fit(x,guess[0],guess[1]) - totchi2=[] - for k in range(len(x)): - singlechi2=((y[k]-fitdata[k])/objvar[k])**2 - totchi2.append(singlechi2) - chi2=np.sum(totchi2) - if chi2<=minchi2: - minchi2=chi2 - fita=guess[0] - fitb=guess[1] - #- Increase granularity of 'a' by a factor of 10 - fitc = fita # In case we don't improve chi^2 - for c in range(100): - for d in range(100): - guess=[fita-0.05+0.001*c,0.1*d] - fitdata=fit(x,guess[0],guess[1]) - totchi2=[] - for k in range(len(x)): - singlechi2=((y[k]-fitdata[k])/objvar[k])**2 - totchi2.append(singlechi2) - chi2=np.sum(totchi2) - if chi2<=minchi2: - minchi2=chi2 - fitc=guess[0] - fitd=guess[1] - #- Increase granularity of 'a' by another factor of 10 - for e in range(100): - for f in range(100): - guess=[fitc-0.005+0.0001*e,0.1*f] - fitdata=fit(x,guess[0],guess[1]) - totchi2=[] - for k in range(len(x)): - singlechi2=((y[k]-fitdata[k])/objvar[k])**2 - totchi2.append(singlechi2) - chi2=np.sum(totchi2) - if chi2<=minchi2: - minchi2=chi2 - fite=guess[0] - fitf=guess[1] - # Save - fitcoeff.append([fite,fitf]) - fidsnr_tgt.append(fit(10**(-0.4*(fmag-22.5)),fita,fitb)) - fitT.append(T) - except RuntimeError: - log.warning("In fit of {}, fit minimization failed!".format(T)) - fitcoeff.append(np.nan) - fidsnr_tgt.append(np.nan) - - qadict["{:s}_FIBERID".format(T)]=fibers.tolist() - if offline: - snr_mag=[medsnr,mags] - snrmag.append(snr_mag) - else: - snr_mag=[all_medsnr,mags] - snrmag.append(snr_mag) - - #- Calculate residual SNR for focal plane plots - if not offline: - fit_snr = fit(x,fite,fitf) - fitsnr.append(fit_snr) - resid = (med_snr-fit_snr)/fit_snr - resid_snr += resid.tolist() - else: - x=10**(-0.4*(mags-22.5)) - fit_snr = fit(x,fite,fitf) - fitsnr.append(fit_snr) - resid = (all_medsnr-fit_snr)/fit_snr - resid_snr += resid.tolist() - - - qadict["NUM_NEGATIVE_SNR"]=sum(neg_snr_tot) - qadict["SNR_MAG_TGT"]=snrmag - qadict["FITCOEFF_TGT"]=fitcoeff - qadict["SNR_RESID"]=resid_snr - qadict["FIDSNR_TGT"]=fidsnr_tgt - 
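The three nested scans above are a coarse-to-fine grid search: each pass re-centers on the best 'a' and shrinks its step by 10x while rescanning 'b'. A compact sketch of the same idea (a refactoring for clarity, not the original implementation; the chi-squared term divides by the per-fiber variance exactly as the loops above do):

import numpy as np

def grid_scan(fit, x, y, var, a_grid, b_grid):
    # Brute-force chi^2 evaluation over an (a, b) grid; returns the best triple.
    best = (np.inf, a_grid[0], b_grid[0])
    for a in a_grid:
        for b in b_grid:
            chi2 = np.sum(((y - fit(x, a, b)) / var) ** 2)
            if chi2 <= best[0]:
                best = (chi2, a, b)
    return best

def coarse_to_fine(fit, x, y, var):
    b_grid = 0.1 * np.arange(100)
    # Pass 1: coarse grid in 'a', as in the first double loop above.
    _, a, b = grid_scan(fit, x, y, var, 0.01 * np.arange(100), b_grid)
    # Passes 2-3: re-center on 'a' and shrink its step by 10x each time,
    # mirroring the second and third scans in orig_SNRFit.
    for step in (0.001, 0.0001):
        _, a, b = grid_scan(fit, x, y, var, a - 50 * step + step * np.arange(100), b_grid)
    return a, b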
qadict["OBJLIST"]=fitT - qadict["RA"]=frame.fibermap['TARGET_RA'] - qadict["DEC"]=frame.fibermap['TARGET_DEC'] - - print("End SNR Fit") - return qadict,fitsnr - -def gauss(x,a,mu,sigma): - """ - Gaussian fit of input data - """ - return a*np.exp(-(x-mu)**2/(2*sigma**2)) diff --git a/deprecated/py/desispec/qa/utils.py b/deprecated/py/desispec/qa/utils.py deleted file mode 100644 index 453555ab5..000000000 --- a/deprecated/py/desispec/qa/utils.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -desispec.qa.utils -================= - -Module for QA support. -""" -from __future__ import print_function, absolute_import, division - -import numpy as np - -def get_skyres(cframes, sub_sky=False, flatten=True): - """ - Args: - cframes: str or list - Single cframe or a list of them - sub_sky: bool, optional - Subtract the sky? This should probably not be done - flatten: bool, optional - Return a flat, 1D array for each variable - combine: bool, optional - combine the individual sky fibers? Median 'smash' - - Returns: - wave : ndarray - flux : ndarray - res : ndarray - ivar : ndarray - - """ - from desispec.io import read_frame - from desispec.io.sky import read_sky - from desispec.sky import subtract_sky - - if isinstance(cframes,list): - all_wave, all_flux, all_res, all_ivar = [], [], [], [] - for cframe_file in cframes: - wave, flux, res, ivar = get_skyres(cframe_file, flatten=flatten) - # Save - all_wave.append(wave) - all_flux.append(flux) - all_res.append(res) - all_ivar.append(ivar) - # Concatenate -- Shape is preserved (nfibers, npix) - twave = np.concatenate(all_wave) - tflux = np.concatenate(all_flux) - tres = np.concatenate(all_res) - tivar = np.concatenate(all_ivar) - # Return - return twave, tflux, tres, tivar - - cframe = read_frame(cframes, skip_resolution=True) - if cframe.meta['FLAVOR'] in ['flat','arc']: - raise ValueError("Bad flavor for exposure: {:s}".format(cframes)) - - # Sky - sky_file = cframes.replace('cframe', 'sky') - skymodel = read_sky(sky_file) - if sub_sky: - subtract_sky(cframe, skymodel) - # Resid - skyfibers = np.where(cframe.fibermap['OBJTYPE'] == 'SKY')[0] - res = cframe.flux[skyfibers] # Flux calibrated - ivar = cframe.ivar[skyfibers] # Flux calibrated - flux = skymodel.flux[skyfibers] # Residuals; not flux calibrated! - wave = np.outer(np.ones(flux.shape[0]), cframe.wave) - # Combine? - ''' - if combine: - res = np.median(res, axis=0) - ivar = np.median(ivar, axis=0) - flux = np.median(flux, axis=0) - wave = np.median(wave, axis=0) - ''' - # Return - if flatten: - return wave.flatten(), flux.flatten(), res.flatten(), ivar.flatten() - else: - return wave, flux, res, ivar diff --git a/deprecated/py/desispec/quicklook/__init__.py b/deprecated/py/desispec/quicklook/__init__.py deleted file mode 100644 index 44449fb1e..000000000 --- a/deprecated/py/desispec/quicklook/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -desispec.quicklook -================== - -""" -# help with 2to3 support. -from __future__ import absolute_import, division, print_function - -#SE: needed to maintain quicklook-specific version -__qlversion__ = "19.2.0" -#from . import rawimage -#import MAs -#from desispec.qa import MonAlgs -#from . import ProcAlgs -#from . import QLHeartbeat -#from . import QLLogger -#from . 
import QL diff --git a/deprecated/py/desispec/quicklook/arcprocess.py b/deprecated/py/desispec/quicklook/arcprocess.py deleted file mode 100644 index 9b2612d5a..000000000 --- a/deprecated/py/desispec/quicklook/arcprocess.py +++ /dev/null @@ -1,160 +0,0 @@ -""" -desispec.quicklook.arcprocess -============================= - -""" -import numpy as np -import scipy.optimize -from numpy.polynomial.legendre import Legendre, legval, legfit -from desispec.quicklook import qlexceptions,qllogger -from desispec.io import read_xytraceset, write_xytraceset -from specter.util.traceset import TraceSet,fit_traces - -qlog=qllogger.QLLogger("QuickLook",20) -log=qlog.getlog() - -def sigmas_from_arc(wave,flux,ivar,linelist,n=2): - """ - Fit Gaussians to the listed arc lines and return the corresponding sigmas in pixel units - Args: - linelist: list of lines (A) for which fit is to be done - n: fit region half width (in bin units): n=2 bins => (2*n+1)=5 bins fitting window. - """ - - nwave=wave.shape - - #- select the closest match to given lines - ind=[(np.abs(wave-line)).argmin() for line in linelist] - - #- fit a Gaussian about the peaks - meanwaves=np.zeros(len(ind)) - emeanwaves=np.zeros(len(ind)) - sigmas=np.zeros(len(ind)) - esigmas=np.zeros(len(ind)) - - for jj,index in enumerate(ind): - thiswave=wave[index-n:index+n+1]-linelist[jj] #- fit window about 0 - thisflux=flux[index-n:index+n+1] - thisivar=ivar[index-n:index+n+1] - - #RS: skip lines with zero flux - if 0. not in thisflux: - spots=thisflux/thisflux.sum() - try: - popt,pcov=scipy.optimize.curve_fit(_gauss_pix,thiswave,spots) - meanwaves[jj]=popt[0]+linelist[jj] - if pcov[0,0] >= 0.: - emeanwaves[jj]=pcov[0,0]**0.5 - sigmas[jj]=popt[1] - if pcov[1,1] >= 0.: - esigmas[jj]=(pcov[1,1]**0.5) - except: - pass - - k=np.logical_and(~np.isnan(esigmas),esigmas!=np.inf) - sigmas=sigmas[k] - meanwaves=meanwaves[k] - esigmas=esigmas[k] - return meanwaves,emeanwaves,sigmas,esigmas - -def fit_wsigmas(means,wsigmas,ewsigmas,npoly=2,domain=None): - #- return callable legendre object - wt=1/ewsigmas**2 - legfit = Legendre.fit(means, wsigmas, npoly, domain=domain,w=wt) - - return legfit - -def _gauss_pix(x,mean,sigma): - x=(np.asarray(x,dtype=float)-mean)/(sigma*np.sqrt(2)) - dx=x[1]-x[0] #- uniform spacing - edges= np.concatenate((x-dx/2, x[-1:]+dx/2)) - y=scipy.special.erf(edges) - return (y[1:]-y[:-1])/2 - -def process_arc(frame,linelist=None,npoly=2,nbins=2,domain=None): - """ - frame: desispec.frame.Frame object, presumably with resolution not evaluated. - linelist: line list to fit - npoly: polynomial order for sigma expansion - nbins: number of bins for half of the fitting window - return: coefficients of the polynomial expansion - - """ - - if domain is None : - raise ValueError("domain must be given in process_arc") - - nspec=frame.flux.shape[0] - if linelist is None: - camera=frame.meta["CAMERA"] - #- load arc lines - from desispec.bootcalib import load_arcline_list, load_gdarc_lines,find_arc_lines - llist=load_arcline_list(camera) - dlamb,gd_lines=load_gdarc_lines(camera,llist) - linelist=gd_lines - #linelist=[5854.1101,6404.018,7034.352,7440.9469] #- not final - log.info("No line list configured. 
Fitting for lines {}".format(linelist)) - coeffs=np.zeros((nspec,npoly+1)) #- coeffs array - - for spec in range(nspec): - #- Allow arc processing to use either QL or QP extraction - if isinstance(frame.wave[0],float): - wave=frame.wave - else: - wave=frame.wave[spec] - - flux=frame.flux[spec] - ivar=frame.ivar[spec] - - #- amend line list to only include lines in given wavelength range - if wave[0] >= linelist[0]: - noline_ind_lo=np.where(np.array(linelist)<=wave[0]) - linelist=linelist[np.max(noline_ind_lo[0])+1:len(linelist)-1] - log.info("First {} line(s) outside wavelength range, skipping these".format(len(noline_ind_lo[0]))) - if wave[len(wave)-1] <= linelist[len(linelist)-1]: - noline_ind_hi=np.where(np.array(linelist)>=wave[len(wave)-1]) - linelist=linelist[0:np.min(noline_ind_hi[0])-1] - log.info("Last {} line(s) outside wavelength range, skipping these".format(len(noline_ind_hi[0]))) - - meanwaves,emeanwaves,sigmas,esigmas=sigmas_from_arc(wave,flux,ivar,linelist,n=nbins) - if domain is None: - domain=(np.min(wave),np.max(wave)) - - # RS: if Gaussian couldn't be fit to a line, don't do legendre fit for fiber - if 0. in sigmas or 0. in esigmas: - pass - else: - try: - thislegfit=fit_wsigmas(meanwaves,sigmas,esigmas,domain=domain,npoly=npoly) - coeffs[spec]=thislegfit.coef - except: - pass - - # need to return the wavemin and wavemax of the fit - return coeffs,domain[0],domain[1] - -def write_psffile(infile,wcoeffs,wcoeffs_wavemin,wcoeffs_wavemax,outfile,wavestepsize=None): - """ - extract psf file, add wcoeffs, and make a new psf file preserving the traces etc. - psf module will load this - """ - - tset = read_xytraceset(infile) - - # convert wsigma to ysig ... - nfiber = wcoeffs.shape[0] - ncoef = wcoeffs.shape[1] - nw = 100 # need a larger number than ncoef to get an accurate dydw from the gradients - - # wcoeffs and tset do not necessarily have the same wavelength range - wave = np.linspace(tset.wavemin,tset.wavemax,nw) - wsig_set = TraceSet(wcoeffs,[wcoeffs_wavemin,wcoeffs_wavemax]) - wsig_vals = np.zeros((nfiber,nw)) - for f in range(nfiber) : - y_vals = tset.y_vs_wave(f,wave) - dydw = np.gradient(y_vals)/np.gradient(wave) - wsig_vals[f]=wsig_set.eval(f,wave)*dydw - tset.ysig_vs_wave_traceset = fit_traces(wave, wsig_vals, deg=ncoef-1, domain=(tset.wavemin,tset.wavemax)) - - write_xytraceset(outfile,tset) - diff --git a/deprecated/py/desispec/quicklook/merger.py b/deprecated/py/desispec/quicklook/merger.py deleted file mode 100644 index 704959865..000000000 --- a/deprecated/py/desispec/quicklook/merger.py +++ /dev/null @@ -1,367 +0,0 @@ -""" -desispec.quicklook.merger -========================= - -A class to merge quicklook qa outputs. 
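Stepping back to sigmas_from_arc above: the _gauss_pix model integrates a unit-area Gaussian across uniform pixel bins via the erf of the bin edges, and curve_fit then recovers the line centroid and sigma. A self-contained sketch of that pattern on a synthetic line (window size and line parameters are illustrative; scipy.special is imported explicitly here):

import numpy as np
import scipy.special
import scipy.optimize

def gauss_pix(x, mean, sigma):
    # Unit-area Gaussian integrated over uniform bins centered on x
    # (same erf construction as _gauss_pix above).
    z = (np.asarray(x, dtype=float) - mean) / (sigma * np.sqrt(2))
    dz = z[1] - z[0]                     # assumes uniform bin spacing
    edges = np.concatenate((z - dz / 2, z[-1:] + dz / 2))
    y = scipy.special.erf(edges)
    return (y[1:] - y[:-1]) / 2

# Five-bin window (n=2 on each side of the peak), as in sigmas_from_arc
x = np.arange(-2.0, 3.0)                 # bin centers relative to the line
spots = gauss_pix(x, 0.3, 1.1)           # "observed" normalized profile
popt, pcov = scipy.optimize.curve_fit(gauss_pix, x, spots, p0=(0.0, 1.0))
print(popt)                              # ~ [0.3, 1.1]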
-""" -from __future__ import absolute_import, division, print_function -from desiutil.io import yamlify -import yaml -import json -import numpy as np -import datetime -import pytz - -############################################################### -def remove_task(myDict, Key): - if Key in myDict: - del myDict[Key] - return myDict -############################################################### -def rename_task(myDict, oldKey, newKey): - - if oldKey in myDict: - - task_data = myDict[oldKey] - del myDict[oldKey] - myDict[newKey] = task_data - - return myDict -############################################################### -## KeyHead = "KeyHead" or "PARAMS" - -def transferKEY(myDict, KeyHead, old_task, new_task, keyList): - - if old_task in myDict and new_task in myDict: - for key in keyList: - if key in myDict[old_task][KeyHead]: - data = myDict[old_task][KeyHead][key] - del myDict[old_task][KeyHead][key] - myDict[new_task][KeyHead][key] = data - - return myDict - -############################################################### -### Please Give the correct Re-arrangmenet recipe here ... - -def modify_tasks(myDict): - - ################ - ### Moving all keys in keyList under Metrics (from PREPROC to BOXCAREXTRACT) - keyList = ["XWSIGMA", "XWSIGMA_AMP", "XWSIGMA_STATUS"] - if "EXTRACT_QP" in myDict: - myDict = transferKEY(myDict, "METRICS", "EXTRACT_QP", "PREPROC", keyList) - elif "BOXCAREXTRACT" in myDict: - myDict = transferKEY(myDict, "METRICS", "BOXCAREXTRACT", "PREPROC", keyList) - - ################ - keyList = ["XWSIGMA_NORMAL_RANGE", "XWSIGMA_REF", "XWSIGMA_WARN_RANGE"] - if "EXTRACT_QP" in myDict: - myDict = transferKEY(myDict, "PARAMS", "EXTRACT_QP", "PREPROC",keyList) - elif "BOXCAREXTRACT" in myDict: - myDict = transferKEY(myDict, "PARAMS", "BOXCAREXTRACT", "PREPROC",keyList) - - ################ - keyList = ["CHECKHDUS","EXPNUM","CHECKHDUS_STATUS","EXPNUM_STATUS"] - myDict = transferKEY(myDict, "METRICS", "INITIALIZE", "PREPROC", keyList) - - ################ - - keyList = ["XYSHIFTS","XYSHIFTS_STATUS"] - if "EXTRACT_QP" in myDict: - myDict = transferKEY(myDict, "METRICS", "FLEXURE", "EXTRACT_QP", keyList) - elif "BOXCAREXTRACT" in myDict: - myDict = transferKEY(myDict, "METRICS", "FLEXURE", "BOXCAREXTRACT", keyList) - - ################ - keyList = ["XYSHIFTS_NORMAL_RANGE", "XYSHIFTS_WARN_RANGE", "XYSHIFTS_DARK_REF", "XYSHIFTS_GRAY_REF","XYSHIFTS_BRIGHT_REF"] - if "EXTRACT_QP" in myDict: - myDict = transferKEY(myDict, "PARAMS", "FLEXURE", "EXTRACT_QP", keyList) - elif "BOXCAREXTRACT" in myDict: - myDict = transferKEY(myDict, "PARAMS", "FLEXURE", "BOXCAREXTRACT", keyList) - - ################ - keyList = ["PEAKCOUNT","PEAKCOUNT_FIB","PEAKCOUNT_NOISE","PEAKCOUNT_STATUS","SKYCONT","SKYCONT_FIBER","SKYCONT_STATUS","SKYRBAND","SKY_RFLUX_DIFF","SKY_FIB_RBAND","FIDSNR_TGT","FIDSNR_TGT_STATUS","FITCOEFF_TGT","MEDIAN_SNR","NUM_NEGATIVE_SNR","SNR_MAG_TGT","SNR_RESID","OBJLIST"] - if "APPLYFIBERFLAT_QP" in myDict: - myDict = transferKEY(myDict, "METRICS", "APPLYFIBERFLAT_QP", "SKYSUB_QP", keyList) - myDict = transferKEY(myDict, "METRICS", "SKYSUB_QP", "APPLYFLUXCALIBRATION", keyList) - elif "APPLYFIBERFLAT_QL" in myDict: - myDict = transferKEY(myDict, "METRICS", "APPLYFIBERFLAT_QL", "SKYSUB_QL", keyList) - myDict = transferKEY(myDict, "METRICS", "SKYSUB_QL", "APPLYFLUXCALIBRATION", keyList) - - ################ - keyList = 
["B_CONT","R_CONT","Z_CONT","PEAKCOUNT_NORMAL_RANGE","PEAKCOUNT_BRIGHT_REF","PEAKCOUNT_DARK_REF","PEAKCOUNT_GRAY_REF","PEAKCOUNT_WARN_RANGE","SKYCONT_NORMAL_RANGE","SKYCONT_REF","SKYCONT_WARN_RANGE","SKYCONT_BRIGHT_REF","SKYCONT_DARK_REF","SKYCONT_GRAY_REF","RESIDUAL_CUT","SIGMA_CUT","FIDSNR_TGT_NORMAL_RANGE","FIDSNR_TGT_WARN_RANGE","FIDSNR_TGT_BRIGHT_REF","FIDSNR_TGT_DARK_REF","FIDSNR_TGT_GRAY_REF","FIDMAG"] - if "APPLYFIBERFLAT_QP" in myDict: - myDict = transferKEY(myDict, "PARAMS", "APPLYFIBERFLAT_QP", "SKYSUB_QP", keyList) - myDict = transferKEY(myDict, "PARAMS", "SKYSUB_QP", "APPLYFLUXCALIBRATION", keyList) - elif "APPLYFIBERFLAT_QL" in myDict: - myDict = transferKEY(myDict, "PARAMS", "APPLYFIBERFLAT_QL", "SKYSUB_QL", keyList) - myDict = transferKEY(myDict, "PARAMS", "SKYSUB_QL", "APPLYFLUXCALIBRATION", keyList) - - ### Changing Task Names - myDict = rename_task(myDict, "PREPROC", "CHECK_CCDs") - myDict = rename_task(myDict, "BOXCAREXTRACT", "CHECK_FIBERS") - myDict = rename_task(myDict, "EXTRACT_QP", "CHECK_FIBERS") - myDict = rename_task(myDict, "APPLYFLUXCALIBRATION", "CHECK_SPECTRA") - myDict = rename_task(myDict, "RESOLUTIONFIT", "CHECK_ARC") - myDict = rename_task(myDict, "COMPUTEFIBERFLAT_QL", "CHECK_FIBERFLAT") - myDict = rename_task(myDict, "COMPUTEFIBERFLAT_QP", "CHECK_FIBERFLAT") - ### Removing empty (or unused Pipeline steps - myDict = remove_task(myDict, "FLEXURE") - myDict = remove_task(myDict, "APPLYFIBERFLAT_QL") - myDict = remove_task(myDict, "APPLYFIBERFLAT_QP") - myDict = remove_task(myDict, "SKYSUB_QL") - myDict = remove_task(myDict, "SKYSUB_QP") - myDict = remove_task(myDict, "INITIALIZE") - - return myDict - - -############################################################### -### Replacing "PIPELINE_STEPS" with "TASKS" -### Re-ordering Task metrics and Params - -def taskMaker(myDict): - - if "PIPELINE_STEPS" in myDict: - - tasks = {} - task_data = myDict["PIPELINE_STEPS"] - - task_data = modify_tasks(task_data) - - del myDict["PIPELINE_STEPS"] - myDict["TASKS"] = task_data - - return myDict -############################################################### - - -################################### -# GENERAL_INFO section -#def delKey(d, k, val=None, remove=True): - - #if isinstance(d, dict): - #key_list = [] - #for key, value in d.items(): - #if key==k: - - #val = value - #key_list.append(key) - #val = delKey(value, k, val=val, remove=remove) - #if remove: - #for key in key_list: - #del d[key] - - #elif isinstance(d, list): - - #try: - #for i in range(len(d)): - #val = delKey(d[i], k, val=val, remove=remove) - #except: - #return val - - #else: return val - - #return val - - - -def delKey(d, k, val=None, remove=True, include=False): - - if isinstance(d, dict): - key_list = [] - for key, value in d.items(): - if (key==k and not include) or (k in key and include): - - val = value - key_list.append(key) - val = delKey(value, k, val=val, remove=remove) - if remove: - for key in key_list: - del d[key] - - elif isinstance(d, list): - - try: - for i in range(len(d)): - val = delKey(d[i], k, val=val, remove=remove) - except: - return val - - else: return val - - return val -################################### -# facilitate the GENERAL_INFO section - -def reOrderDict(mergeDict): - - for Night in mergeDict["NIGHTS"]: - for Exposure in Night["EXPOSURES"]: - for Camera in Exposure["CAMERAS"]: - - ra = delKey(Camera, "RA") - dec = delKey(Camera, "DEC") - program = delKey(Camera, "PROGRAM") - - airmass = delKey(Camera, "AIRMASS") - seeing = delKey(Camera, "SEEING") - exptime = 
delKey(Camera, "EXPTIME") - desispec_run_ver = delKey(Camera, "PROC_DESISPEC_VERSION") # desispec version in the raw FITS header - desispec_fits_ver = delKey(Camera, "FITS_DESISPEC_VERSION") # desispec version of the software release - quicklook_run_ver = delKey(Camera, "PROC_QuickLook_VERSION") # version of the quicklook development state - fibermags = delKey(Camera,"FIBER_MAGS") - skyfib_id = delKey(Camera,"SKYFIBERID") - nskyfib = delKey(Camera,"NSKY_FIB") - - delKey(Camera, "SKYSUB_QL") - delKey(Camera, "MED_RESID") - delKey(Camera, "MED_RESID_FIBER") - delKey(Camera, "MED_RESID_WAVE") - delKey(Camera, "MED_RESID") - delKey(Camera, "MED_RESID_FIBER") - delKey(Camera, "RESID_PER") - delKey(Camera, "RESID_STATUS") - delKey(Camera, "BIAS") - delKey(Camera, "NOISE") - - elg_fiberid = delKey(Camera, "ELG_FIBERID") - lrg_fiberid = delKey(Camera, "LRG_FIBERID") - qso_fiberid = delKey(Camera, "QSO_FIBERID") - star_fiberid = delKey(Camera, "STAR_FIBERID", remove=False) - - std_fiberid = delKey(Camera, "STD_FIBERID", remove=False) - - if star_fiberid is None: - star_fiberid = std_fiberid - - b_peaks = delKey(Camera, "B_PEAKS") - r_peaks = delKey(Camera, "R_PEAKS") - z_peaks = delKey(Camera, "Z_PEAKS") - - try: ra = [float("%.5f" % m) for m in ra] - except: ra=None - - try: dec = [float("%.5f" % m) for m in dec] - except: dec=None - - - # Date/time of the merger i.e., QL run - time is in UTC = Mayall local time + 7h - def utcnow(): - return datetime.datetime.now(tz=pytz.utc) - - QLrun_datime = utcnow().isoformat() - - datetime.datetime.now(datetime.timezone.utc) - datetime.datetime.now(tz=pytz.utc) - - - Camera["GENERAL_INFO"]={"QLrun_datime_UTC":QLrun_datime,"PROGRAM":format(program).upper(),"SEEING":seeing,"AIRMASS":airmass,"EXPTIME":exptime,"FITS_DESISPEC_VERSION":desispec_fits_ver,"PROC_DESISPEC_VERSION":desispec_run_ver,"PROC_QuickLook_VERSION":quicklook_run_ver,"RA":ra,"DEC":dec,"SKY_FIBERID":skyfib_id,"ELG_FIBERID":elg_fiberid,"LRG_FIBERID":lrg_fiberid,"QSO_FIBERID":qso_fiberid,"STAR_FIBERID":star_fiberid,"B_PEAKS":b_peaks,"R_PEAKS":r_peaks,"Z_PEAKS":z_peaks,"FIBER_MAGS":fibermags,"NSKY_FIB":nskyfib} - -################################### - -def EditDic(Camera): - desispec_run_ver = delKey(Camera, "PROC_DESISPEC_VERSION") # desispec version in the raw FITS header - desispec_fits_ver = delKey(Camera, "FITS_DESISPEC_VERSION") # desispec version of the software release - quicklook_run_ver = delKey(Camera, "PROC_QuickLook_VERSION") # version of the quivklook development state - - delKey(Camera, "SKYSUB_QL") - delKey(Camera, "MED_RESID") - delKey(Camera, "MED_RESID_FIBER") - delKey(Camera, "MED_RESID_WAVE") - delKey(Camera, "MED_RESID") - delKey(Camera, "MED_RESID_FIBER") - delKey(Camera, "RESID_PER") - delKey(Camera, "RESID_STATUS") - delKey(Camera, "BIAS") - delKey(Camera, "NOISE") - delKey(Camera, "XWSHIFT_AMP") - delKey(Camera, "XWSIGMA_SHIFT") - delKey(Camera, "NREJ") - delKey(Camera, "MED_SKY") - delKey(Camera, "NBAD_PCHI") - - all_Steps=delKey(Camera,"PIPELINE_STEPS") # returns a list of dictionaries, each holding one step - step_dict={} - for step in all_Steps: - if step['PIPELINE_STEP'] == 'INITIALIZE': - Camera['GENERAL_INFO']=delKey(step,"METRICS",remove=False,include=True) - else: - step_Name=delKey(step,"PIPELINE_STEP") - step_dict[step_Name]=step - Camera["PIPELINE_STEPS"]=step_dict - - program=Camera['GENERAL_INFO']['PROGRAM'] - sciprog = ["DARK","GRAY","BRIGHT"] - 
QAlist=["BIAS_AMP","LITFRAC_AMP","NOISE_AMP","XWSIGMA","XYSHIFTS","NGOODFIB","DELTAMAG_TGT","FIDSNR_TGT","SKYRBAND","PEAKCOUNT", "SKYCONT"] - - if program in sciprog: - sciprog.remove(program) - for prog in sciprog: - for qa in QAlist: - delKey(Camera,qa+'_'+prog+"_REF",include=True) - - Camera["GENERAL_INFO"]["FITS_DESISPEC_VERSION"]=desispec_fits_ver - Camera["GENERAL_INFO"]["PROC_DESISPEC_VERSION"]=desispec_run_ver - Camera["GENERAL_INFO"]["PROC_QuickLook_VERSION"]=quicklook_run_ver - -################################### - - -class QL_QAMerger: - def __init__(self,night,expid,flavor,camera,program,convdict): - self.__night=night - self.__expid=expid - self.__flavor=flavor - self.__camera=camera - self.__program=program - self.__stepsArr=[] - #self.__schema={'NIGHTS':[{'NIGHT':night,'EXPOSURES':[{'EXPID':expid,'FLAVOR':flavor,'PROGRAM':program, 'CAMERAS':[{'CAMERA':camera, 'PIPELINE_STEPS':self.__stepsArr}]}]}]} - - #general_Info = esnEditDic(self.__stepsArr) - - # Get flux information from fibermap and convert to fiber magnitudes - if flavor == 'science': - if camera[0].lower()=='b':decamfilter='G' - elif camera[0].lower()=='r': decamfilter='R' - elif camera[0].lower()=='z': decamfilter='Z' - self.__schema={'NIGHT':night,'EXPID':expid,'CAMERA':camera,'FLAVOR':flavor,'PIPELINE_STEPS':self.__stepsArr} - else: - self.__schema={'NIGHT':night,'EXPID':expid,'CAMERA':camera,'FLAVOR':flavor,'PIPELINE_STEPS':self.__stepsArr} - - class QL_Step: - def __init__(self,paName,paramsDict,metricsDict): - self.__paName=paName - self.__pDict=paramsDict - self.__mDict=metricsDict - def getStepName(self): - return self.__paName - def addParams(self,pdict): - self.__pDict.update(pdict) - def addMetrics(self,mdict): - self.__mDict.update(mdict) - def addPipelineStep(self,stepName): - metricsDict={} - paramsDict={} - stepDict={"PIPELINE_STEP":stepName.upper(),'METRICS':metricsDict,'PARAMS':paramsDict} - self.__stepsArr.append(stepDict) - return self.QL_Step(stepName,paramsDict,metricsDict) - - - - def writeTojsonFile(self,fileName): - g=open(fileName,'w') - - - myDict = yamlify(self.__schema) - #reOrderDict(myDict) - - # remove lists ... after this step there is no list of dictionaries - EditDic(myDict) - - # this step modifies Takse, renames them, and re-arrange Metrics and corresponding Paramas - myDict = taskMaker(myDict) - - json.dump(myDict, g, sort_keys=True, indent=4) - g.close() diff --git a/deprecated/py/desispec/quicklook/palib.py b/deprecated/py/desispec/quicklook/palib.py deleted file mode 100644 index 5fb3f091e..000000000 --- a/deprecated/py/desispec/quicklook/palib.py +++ /dev/null @@ -1,175 +0,0 @@ -""" -desispec.quicklook.palib -======================== - -Low level functions to be from top level PAs. -""" -import numpy as np -from desispec.quicklook import qlexceptions,qllogger -qlog=qllogger.QLLogger("QuickLook",20) -log=qlog.getlog() - - -def project(x1,x2): - """ - return a projection matrix so that arrays are related by linear interpolation - x1: Array with one binning - x2: new binning - - Return Pr: x1= Pr.dot(x2) in the overlap region - """ - x1=np.sort(x1) - x2=np.sort(x2) - Pr=np.zeros((len(x2),len(x1))) - - e1 = np.zeros(len(x1)+1) - e1[1:-1]=(x1[:-1]+x1[1:])/2.0 # calculate bin edges - e1[0]=1.5*x1[0]-0.5*x1[1] - e1[-1]=1.5*x1[-1]-0.5*x1[-2] - e1lo = e1[:-1] # make upper and lower bounds arrays vs. 
index - e1hi = e1[1:] - - e2=np.zeros(len(x2)+1) - e2[1:-1]=(x2[:-1]+x2[1:])/2.0 # bin edges for resampled grid - e2[0]=1.5*x2[0]-0.5*x2[1] - e2[-1]=1.5*x2[-1]-0.5*x2[-2] - - for ii in range(len(e2)-1): # columns - #- Find indices in x1, containing the element in x2 - #- This is much faster than looping over rows - - k = np.where((e1lo<=e2[ii]) & (e1hi>e2[ii]))[0] - # this where obtains single e1 edge just below start of e2 bin - emin = e2[ii] - emax = e1hi[k] - if e2[ii+1] < emax : emax = e2[ii+1] - dx = (emax-emin)/(e1hi[k]-e1lo[k]) - Pr[ii,k] = dx # enter first e1 contribution to e2[ii] - - if e2[ii+1] > emax : - # cross over to another e1 bin contributing to this e2 bin - l = np.where((e1 < e2[ii+1]) & (e1 > e1hi[k]))[0] - if len(l) > 0 : - # several-to-one resample. Just consider 3 bins max. case - Pr[ii,k[0]+1] = 1.0 # middle bin fully contained in e2 - q = k[0]+2 - else : q = k[0]+1 # point to bin partially contained in current e2 bin - - try: - emin = e1lo[q] - emax = e2[ii+1] - dx = (emax-emin)/(e1hi[q]-e1lo[q]) - Pr[ii,q] = dx - except: - pass - - #- edge: - if x2[-1]==x1[-1]: - Pr[-1,-1]=1 - return Pr - -def resample_spec(wave,flux,outwave,ivar=None): - """ - Rebinning that conserves S/N - Algorithm is based on http://www.ast.cam.ac.uk/%7Erfc/vpfit10.2.pdf - Appendix: B.1 - - Args: - wave : original wavelength array (expected, but not limited, to be the native CCD pixel wavelength grid) - outwave: new wavelength array, expected (but not limited) to be a uniform binning - flux : df/dx (flux per A) sampled at wave - ivar : ivar in original binning. If not None, ivar in new binning is returned. - - Note: - Full resolution computation for resampling is expensive for quicklook. - - desispec.interpolation.resample_flux using weights by ivar does not conserve total S/N. - Tests with arc lines show much narrower spectral profiles, thus not giving realistic psf resolutions. - This algorithm gives the same resolution as obtained for native CCD binning, i.e., resampling has - insignificant effect. Details and plots are in the arc processing note. - """ - #- convert flux to per bin before projecting to new bins - flux=flux*np.gradient(wave) - - Pr=project(wave,outwave) - n=len(wave) - newflux=Pr.dot(flux) - #- convert back to df/dx (per angstrom) sampled at outwave - newflux/=np.gradient(outwave) #- per angstrom - if ivar is None: - return newflux - else: - ivar = ivar/(np.gradient(wave))**2.0 - newvar=Pr.dot(ivar**(-1.0)) #- maintaining Total S/N - # RK: this is just a kludge until we more robustly ensure newvar is correct - k = np.where(newvar <= 0.0)[0] - newvar[k] = 0.0000001 # flag bins with no contribution from input grid - newivar=1/newvar - # newivar[k] = 0.0 - - #- convert to per angstrom - newivar*=(np.gradient(outwave))**2.0 - return newflux, newivar - -def get_resolution(wave,nspec,tset,usesigma=False): - """ - Calculates approximate resolution values at given wavelengths in the format that can directly - feed resolution data of desispec.frame.Frame object. - - wave: wavelength array - nspec: number of spectra (int) - tset: desispec.xytraceset like object - usesigma: allows using sigma from the psf file for resolution computation. - - returns : resolution data (nspec,nband,nwave); nband = 1 for usesigma = False, otherwise nband=21 - """ - #from desispec.resolution import Resolution - from desispec.quicklook.qlresolution import QuickResolution - nwave=len(wave) - if usesigma: - nband=21 - else: - nband=1 # only for dimensionality purpose of data model. 
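A usage sketch for the project/resample_spec pair above: a flat flux-density spectrum should survive rebinning unchanged, because the projection conserves the per-bin integral. This assumes a checkout where the module is still importable (e.g. as desispec.quicklook.palib before this removal); the grids are illustrative.

import numpy as np
from desispec.quicklook.palib import resample_spec   # pre-removal module path

wave = np.arange(3600.0, 3700.0, 0.6)     # native-like CCD grid (illustrative)
outwave = np.arange(3605.0, 3695.0, 1.0)  # coarser uniform grid, away from edges
flux = np.ones_like(wave)                 # flat df/dlambda spectrum

newflux = resample_spec(wave, flux, outwave)
assert np.allclose(newflux, 1.0)          # flux density is conserved bin by bin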
- resolution_data=np.zeros((nspec,nband,nwave)) - - if usesigma: #- use sigmas for resolution based on psffile type - for ispec in range(nspec): - thissigma=tset.ysig_vs_wave(ispec,wave) #- in pixel units - Rsig=QuickResolution(sigma=thissigma,ndiag=nband) - resolution_data[ispec]=Rsig.data - - return resolution_data - -def apply_flux_calibration(frame,fluxcalib): - """ - Apply flux calibration to a sky-subtracted qframe. - Uses the offline algorithm, but assumes a qframe object is input - and that it is on the native CCD wavelength grid. - The calibration vector is resampled to the frame wavelength grid. - - frame: QFrame object - fluxcalib: FluxCalib object - - Modifies frame.flux and frame.ivar - """ - from desispec.quicklook.palib import resample_spec - - nfibers=frame.nspec - - resample_calib=[] - resample_ivar=[] - for i in range(nfibers): - rescalib,resivar=resample_spec(fluxcalib.wave,fluxcalib.calib[i],frame.wave[i],ivar=fluxcalib.ivar[i]) - resample_calib.append(rescalib) - resample_ivar.append(resivar) - fluxcalib.calib=np.array(resample_calib) - fluxcalib.ivar=np.array(resample_ivar) - - C = fluxcalib.calib - frame.flux=frame.flux*(C>0)/(C+(C==0)) - frame.ivar*=(fluxcalib.ivar>0)*(C>0) - for j in range(nfibers): - ok=np.where(frame.ivar[j]>0)[0] - if ok.size>0: - frame.ivar[j,ok]=1./(1./(frame.ivar[j,ok]*C[j,ok]**2)+frame.flux[j,ok]**2/(fluxcalib.ivar[j,ok]*C[j,ok]**4)) - diff --git a/deprecated/py/desispec/quicklook/pas.py b/deprecated/py/desispec/quicklook/pas.py deleted file mode 100644 index 5e38aa696..000000000 --- a/deprecated/py/desispec/quicklook/pas.py +++ /dev/null @@ -1,37 +0,0 @@ -""" -desispec.quicklook.pas -====================== - -""" -from desispec.quicklook import qllogger -from desispec.quicklook import qlexceptions - -class PipelineAlg: - """ Simple base class for Pipeline algorithms """ - def __init__(self,name,inptype,outtype,config,logger=None): - if logger is None: - qll=qllogger.QLLogger() - self.m_log=qll.getlog(name) - else: - self.m_log=logger - self.__inpType__=type(inptype) - self.__outType__=type(outtype) - self.name=name - self.config=config - self.m_log.debug("initializing Monitoring alg {}".format(name)) - def __call__(self,*args,**kwargs): - return self.run(*args,**kwargs) - def run(self,*argv,**kwargs): - pass - def is_compatible(self,Type): - return isinstance(Type,self.__inpType__) - def get_output_type(self): - return self.__outType__ - - def get_default_config(self): - """ return a dictionary of 3-tuples, - field 0 is the name of the parameter - field 1 is the default value of the parameter - field 2 is the comment in human readable format. - Field 2 can be used by QLF to dynamically set up the display""" - return None diff --git a/deprecated/py/desispec/quicklook/procalgs.py b/deprecated/py/desispec/quicklook/procalgs.py deleted file mode 100644 index 1b6f31ee0..000000000 --- a/deprecated/py/desispec/quicklook/procalgs.py +++ /dev/null @@ -1,1297 +0,0 @@ -""" -desispec.quicklook.procalgs -=========================== - -Pipeline Preprocessing algorithms for Quicklook. 
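All of the PAs defined below follow the same contract from pas.PipelineAlg above: __call__ delegates to run(), which validates its inputs and hands off to run_pa() for the actual work. A skeletal sketch (ExamplePA is hypothetical; it assumes the pre-removal desispec.quicklook.pas import path):

from desispec.quicklook import pas       # pre-removal module path

class ExamplePA(pas.PipelineAlg):
    # Minimal PA skeleton: validate in run(), compute in run_pa().
    def __init__(self, name, config, logger=None):
        if name is None or name.strip() == "":
            name = "ExamplePA"
        pas.PipelineAlg.__init__(self, name, dict, dict, config, logger)

    def run(self, *args, **kwargs):
        if len(args) == 0 or not isinstance(args[0], dict):
            raise TypeError("ExamplePA expects a dict input")
        return self.run_pa(args[0], scale=kwargs.get("Scale", 1.0))

    def run_pa(self, data, scale=1.0):
        return {key: value * scale for key, value in data.items()}

pa = ExamplePA("demo", config={})
print(pa({"x": 2.0}, Scale=3.0))         # {'x': 6.0}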
-""" - -import numpy as np -import os,sys -import astropy -import astropy.io.fits as fits -from desispec import io -from desispec.io import read_raw,read_image -from desispec.io.meta import findfile -from desispec.io.fluxcalibration import read_average_flux_calibration -from desispec.calibfinder import findcalibfile -from desispec.quicklook import pas -from desispec.quicklook import qlexceptions,qllogger -from desispec.image import Image as im -from desispec.frame import Frame as fr -from desispec.io.xytraceset import read_xytraceset -from desispec.maskbits import ccdmask - -qlog=qllogger.QLLogger("QuickLook",20) -log=qlog.getlog() - - -class Initialize(pas.PipelineAlg): - """ - This PA takes information from the fibermap and raw header - and adds it to the general info section of the merged dictionary - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Ready" - rawtype=astropy.io.fits.hdu.hdulist.HDUList - pas.PipelineAlg.__init__(self,name,rawtype,rawtype,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("Missing input parameter!") - sys.exit() - if not self.is_compatible(type(args[0])): - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - raw=args[0] - flavor=kwargs['Flavor'] - peaks=None - fibermap=None - if flavor != 'bias' and flavor != 'dark': - fibermap=kwargs['FiberMap'] - peaks=kwargs['Peaks'] - camera=kwargs['Camera'] - - return self.run_pa(raw,fibermap,camera,peaks,flavor) - - def run_pa(self,raw,fibermap,camera,peaks,flavor): - import pytz - import datetime - from desitarget.targetmask import desi_mask - from desispec.fluxcalibration import isStdStar - - #- Create general info dictionary to be sent to merged json - general_info={} - - #- Get information from raw header - general_info['PROGRAM']=program=raw[0].header['PROGRAM'].upper() - calibs=['arcs','flat','bias','dark'] - - if not flavor in calibs: - general_info['AIRMASS']=raw[0].header['AIRMASS'] - general_info['SEEING']=raw[0].header['SEEING'] - - #- Get information from fibermap - - #- Limit flux info to fibers in camera - minfiber=int(camera[1])*500 - maxfiber=minfiber+499 - fibermags=[] - for flux in ['FLUX_G','FLUX_R','FLUX_Z']: - fibermags.append(22.5-2.5*np.log10(fibermap[flux][minfiber:maxfiber+1])) - - #- Set sky/no flux fibers to 30 mag - for i in range(3): - skyfibs=np.where(fibermags[i]==0.)[0] - noflux=np.where(fibermags[i]==np.inf)[0] - badmags=np.array(list(set(skyfibs) | set(noflux))) - fibermags[i][badmags]=30. 
- - general_info['FIBER_MAGS']=fibermags - - #- Limit RA and DEC to 5 decimal places - targetra=fibermap['TARGET_RA'][minfiber:maxfiber+1] - general_info['RA']=[float("%.5f"%ra) for ra in targetra] - targetdec=fibermap['TARGET_DEC'][minfiber:maxfiber+1] - general_info['DEC']=[float("%.5f"%dec) for dec in targetdec] - - #- Find fibers in camera per target type - elgfibers=np.where((fibermap['DESI_TARGET']&desi_mask.ELG)!=0)[0] - general_info['ELG_FIBERID']=[elgfib for elgfib in elgfibers if minfiber <= elgfib <= maxfiber] - lrgfibers=np.where((fibermap['DESI_TARGET']&desi_mask.LRG)!=0)[0] - general_info['LRG_FIBERID']=[lrgfib for lrgfib in lrgfibers if minfiber <= lrgfib <= maxfiber] - qsofibers=np.where((fibermap['DESI_TARGET']&desi_mask.QSO)!=0)[0] - general_info['QSO_FIBERID']=[qsofib for qsofib in qsofibers if minfiber <= qsofib <= maxfiber] - skyfibers=np.where((fibermap['DESI_TARGET']&desi_mask.SKY)!=0)[0] - general_info['SKY_FIBERID']=[skyfib for skyfib in skyfibers if minfiber <= skyfib <= maxfiber] - general_info['NSKY_FIB']=len(general_info['SKY_FIBERID']) - stdfibers=np.where(isStdStar(fibermap))[0] - general_info['STAR_FIBERID']=[stdfib for stdfib in stdfibers if minfiber <= stdfib <= maxfiber] - - general_info['EXPTIME']=raw[0].header['EXPTIME'] -# general_info['FITS_DESISPEC_VERION']=raw[0].header['FITS_DESISPEC_VERSION'] -# general_info['PROC_DESISPEC_VERION']=raw[0].header['PROC_DESISPEC_VERSION'] -# general_info['PROC_QuickLook_VERION']=raw[0].header['PROC_QuickLook_VERSION'] - - #- Get peaks from configuration file - if not flavor != 'arcs' and flavor in calibs: - general_info['B_PEAKS']=peaks['B_PEAKS'] - general_info['R_PEAKS']=peaks['R_PEAKS'] - general_info['Z_PEAKS']=peaks['Z_PEAKS'] - - #- Get current time information - general_info['QLrun_datime_UTC']=datetime.datetime.now(tz=pytz.utc).isoformat() - - return (raw,general_info) - - -class Preproc(pas.PipelineAlg): - #- TODO: currently io itself seems to have the preproc inside it. And preproc does bias, pi - # xelflat, etc in one step. - from desispec.maskbits import ccdmask - - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Preproc" - - rawtype=astropy.io.fits.hdu.hdulist.HDUList - pas.PipelineAlg.__init__(self,name,rawtype,im,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Was expecting {} got {}".format(type(self.__inpType__),type(args[0]))) - - input_raw=args[0][0] - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - if 'camera' not in kwargs: - #raise qlexceptions.ParameterException("Need Camera to run preprocess on raw files") - log.critical("Need Camera to run preprocess on raw files") - sys.exit() - - else: - camera=kwargs["camera"] - if camera.upper() not in input_raw: - raise IOError('Camera {} not in raw input'.format(camera)) - if "Bias" in kwargs: - bias=kwargs["Bias"] - else: bias=False - - if "Pixflat" in kwargs: - pixflat=kwargs["Pixflat"] - else: pixflat=False - - if "Mask" in kwargs: - mask=kwargs["Mask"] - else: mask=False - - return self.run_pa(input_raw,camera,bias=bias,pixflat=pixflat,mask=mask,dumpfile=dumpfile) - - def run_pa(self,input_raw,camera,bias=False,pixflat=False,mask=True,dumpfile='ttt1.fits'): - import desispec.preproc - - rawimage=input_raw[camera.upper()].data - header=input_raw[camera.upper()].header - primary_header=input_raw[0].header - if 'INHERIT' in header and header['INHERIT']: - h0 = input_raw[0].header - for key in h0: - if key not in header: - header[key] = h0[key] - #- WARNING!!!This is a hack for QL to run on old raw images for QLF to be working on old set of data - #if "PROGRAM" not in header: - # log.warning("Temporary hack for QL to add header key PROGRAM. Only to facilitate QLF to work on their dataset. Remove this after some time and run with new data set") - # header["PROGRAM"]= 'dark' - #if header["FLAVOR"] not in [None,'bias','arc','flat','science']: - # header["FLAVOR"] = 'science' - - img = desispec.preproc.preproc(rawimage,header,primary_header,bias=bias,pixflat=pixflat,mask=mask) - - - if img.mask is not None : - img.pix *= (img.mask==0) - - - if dumpfile is not None: - night = img.meta['NIGHT'] - expid = img.meta['EXPID'] - io.write_image(dumpfile, img) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - - return img - - -class Flexure(pas.PipelineAlg): - """ Use desi_compute_trace_shifts to output modified psf file - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Flexure" - pas.PipelineAlg.__init__(self,name,im,fr,config,logger) - - def run(self,*args,**kwargs): - if 'preprocFile' not in kwargs: - #raise qlexceptions.ParameterException("Must provide preproc file for desi_compute_trace_shifts") - log.critical("Must provide preproc file for desi_compute_trace_shifts") - sys.exit() - - if 'inputPSFFile' not in kwargs: - #raise qlexceptions.ParameterException("Must provide input psf file desi_compute_trace_shifts") - log.critical("Must provide input psf file desi_compute_trace_shifts") - sys.exit() - - if 'outputPSFFile' not in kwargs: - #raise qlexceptions.ParameterException("Must provide output psf file") - log.critical("Must provide output psf file") - sys.exit() - - preproc_file=kwargs["preprocFile"] - input_file=kwargs["inputPSFFile"] - output_file=kwargs["outputPSFFile"] - - return self.run_pa(preproc_file,input_file,output_file,args) - - def run_pa(self,preproc_file,input_file,output_file,args): - from desispec.util import runcmd - #- Generate modified psf file - cmd="desi_compute_trace_shifts --image {} --psf {} --outpsf {}".format(preproc_file,input_file,output_file) - if not runcmd(cmd)[1]: - raise RuntimeError('desi_compute_trace_shifts failed, psftrace not written') - - #- 
return image object to pass to boxcar for extraction - img=args[0] - return img - - -class BoxcarExtract(pas.PipelineAlg): - from desispec.quicklook.qlboxcar import do_boxcar - from desispec.maskbits import ccdmask - - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="BoxcarExtract" - pas.PipelineAlg.__init__(self,name,im,fr,config,logger) - - def run(self,*args,**kwargs): - - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "PSFFile" not in kwargs: - #raise qlexceptions.ParameterException("Need PSF File") - log.critical("Need PSF File") - sys.exit() - - input_image=args[0] - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - flavor=kwargs["Flavor"] - - psf_filename=kwargs["PSFFile"] - #psf = PSF(psf_filename) - tset = read_xytraceset(psf_filename) - boxwidth=kwargs["BoxWidth"] - nspec=kwargs["Nspec"] - quickRes=kwargs["QuickResolution"] if "QuickResolution" in kwargs else False - if "usesigma" in kwargs: - usesigma=kwargs["usesigma"] - else: usesigma = False - - if "Wavelength" not in kwargs: - wstart = np.ceil(tset.wavemin) - wstop = np.floor(tset.wavemax) - dw = 0.5 - else: - wavelength=kwargs["Wavelength"] - if kwargs["Wavelength"] is not None: #- should be in wstart,wstop,dw format - wstart, wstop, dw = [float(w) for w in wavelength] - else: - wstart = np.ceil(tset.wavemin) - wstop = np.floor(tset.wavemax) - dw = 0.5 - wave = np.arange(wstart, wstop+dw/2.0, dw) - if "Specmin" not in kwargs: - specmin=0 - else: - specmin=kwargs["Specmin"] - if kwargs["Specmin"] is None: - specmin=0 - - if "Nspec" not in kwargs: - nspec = tset.nspec - else: - nspec=kwargs["Nspec"] - if nspec is None: - nspec=tset.nspec - - specmax = specmin + nspec - - camera = input_image.meta['CAMERA'].lower() #- b0, r1, .. 
z9 - spectrograph = int(camera[1]) - fibermin = spectrograph*500 + specmin - if "FiberMap" not in kwargs: - fibermap = None - fibers = np.arange(fibermin, fibermin+nspec, dtype='i4') - else: - fibermap=kwargs["FiberMap"] - fibermap = fibermap[fibermin:fibermin+nspec] - fibers = fibermap['FIBER'] - if "Outfile" in kwargs: - outfile=kwargs["Outfile"] - else: - outfile=None - maskFile=None - if "MaskFile" in kwargs: - maskFile=kwargs['MaskFile'] - - #- Add some header keys relevant for this extraction - input_image.meta['NSPEC'] = (nspec, 'Number of spectra') - input_image.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]') - input_image.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]') - input_image.meta['WAVESTEP']= (dw, 'Wavelength step size [Angstroms]') - - return self.run_pa(input_image,flavor,tset,wave,boxwidth,nspec, - fibers=fibers,fibermap=fibermap,dumpfile=dumpfile, - maskFile=maskFile,usesigma=usesigma,quick_resolution=quickRes) - - def run_pa(self,input_image,flavor,tset,outwave,boxwidth,nspec, - fibers=None,fibermap=None,dumpfile=None, - maskFile=None,usesigma=False,quick_resolution=False): - from desispec.quicklook.qlboxcar import do_boxcar - #import desispec.tset - flux,ivar,Rdata=do_boxcar(input_image, tset, outwave, boxwidth=boxwidth, - nspec=nspec,maskFile=maskFile,usesigma=usesigma, - quick_resolution=quick_resolution) - - #- write to a frame object - qndiag=21 - wsigma=None - if quick_resolution: - log.warning("deprecated, please use QFrame format to store sigma values") - wsigma=np.zeros(flux.shape) - if tset.ysig_vs_wave_traceset is not None : - dw = np.gradient(outwave) - for i in range(nspec): - ysig = tset.ysig_vs_wave(i,outwave) - y = tset.y_vs_wave(i,outwave) - dydw = np.gradient(y)/dw - wsigma[i] = ysig/dydw # in A - frame = fr(outwave, flux, ivar, resolution_data=Rdata,fibers=fibers, - meta=input_image.meta, fibermap=fibermap, - wsigma=wsigma,ndiag=qndiag) - - if dumpfile is not None: - night = frame.meta['NIGHT'] - expid = frame.meta['EXPID'] - io.write_frame(dumpfile, frame) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - return frame - - def get_default_config(self): - return {("BoxWidth",2.5,"Boxcar halfwidth"), - ("PSFFile","%%PSFFile","PSFFile to use"), - ("DeltaW",0.5,"Binwidth of extrapolated wavelength array"), - ("Nspec",500,"number of spectra to extract") - } - - -# TODO 2d extraction runs fine as well. Will need more testing of the setup. - -class Extraction_2d(pas.PipelineAlg): - """ - Offline 2D extraction for offline QuickLook - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="2D Extraction" # using specter.extract.ex2d - pas.PipelineAlg.__init__(self,name,im,fr,config,logger) - - def run(self,*args,**kwargs): - - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "PSFFile_sp" not in kwargs: - #raise qlexceptions.ParameterException("Need PSF File") - log.critical("Need PSF File") - sys.exit() - - from specter.psf import load_psf - - input_image=args[0] - psffile=kwargs["PSFFile_sp"] - psf=load_psf(psffile) - - if "Wavelength" not in kwargs: - wstart = np.ceil(psf.wmin_all) - wstop = np.floor(psf.wmax_all) - dw = 0.5 - else: - wavelength=kwargs["Wavelength"] - if kwargs["Wavelength"] is not None: #- should be in wstart,wstop,dw format - wstart, wstop, dw = [float(w) for w in wavelength] - else: - wstart = np.ceil(psf.wmin_all) - wstop = np.floor(psf.wmax_all) - dw = 0.5 - wave = np.arange(wstart, wstop+dw/2.0, dw) - - if "Specmin" not in kwargs: - specmin=0 - else: - specmin=kwargs["Specmin"] - if kwargs["Specmin"] is None: - specmin=0 - - if "Nspec" not in kwargs: - nspec = psf.nspec - else: - nspec=kwargs["Nspec"] - if nspec is None: - nspec=psf.nspec - - specmax = specmin + nspec - - camera = input_image.meta['CAMERA'].lower() #- b0, r1, .. z9 - spectrograph = int(camera[1]) - fibermin = spectrograph*500 + specmin - - if "FiberMap" not in kwargs: - fibermap = None - fibers = np.arange(fibermin, fibermin+nspec, dtype='i4') - else: - fibermap=kwargs["FiberMap"] - fibermap = fibermap[fibermin:fibermin+nspec] - fibers = fibermap['FIBER'] - if "Regularize" in kwargs: - regularize=kwargs["Regularize"] - else: - regularize=False - if "ndecorr" in kwargs: - ndecorr=ndecorr - else: - ndecorr=True - - bundlesize=25 #- hard coded - - if "Outfile" in kwargs: - outfile=kwargs["Outfile"] - else: - outfile=None - - if "Nwavestep" in kwargs: - wavesize=kwargs["Nwavestep"] - else: - wavesize=50 - - return self.run_pa(input_image,psf,specmin,nspec,wave,regularize=regularize,ndecorr=ndecorr, bundlesize=bundlesize, wavesize=wavesize,outfile=outfile,fibers=fibers,fibermap=fibermap) - - def run_pa(self,input_image,psf,specmin,nspec,wave,regularize=None,ndecorr=True,bundlesize=25,wavesize=50, outfile=None,fibers=None,fibermap=None): - import specter - from specter.extract import ex2d - - flux,ivar,Rdata=ex2d(input_image.pix,input_image.ivar*(input_image.mask==0),psf,specmin,nspec,wave,regularize=regularize,ndecorr=ndecorr,bundlesize=bundlesize,wavesize=wavesize) - - #- Augment input image header for output - input_image.meta['NSPEC'] = (nspec, 'Number of spectra') - input_image.meta['WAVEMIN'] = (wave[0], 'First wavelength [Angstroms]') - input_image.meta['WAVEMAX'] = (wave[-1], 'Last wavelength [Angstroms]') - input_image.meta['WAVESTEP']= (wave[1]-wave[0], 'Wavelength step size [Angstroms]') - input_image.meta['SPECTER'] = (specter.__version__, 'https://github.com/desihub/specter') - #input_image.meta['IN_PSF'] = (_trim(psf_file), 'Input spectral PSF') - #input_image.meta['IN_IMG'] = (_trim(input_file), 'Input image') - - frame = fr(wave, flux, ivar, resolution_data=Rdata,fibers=fibers, meta=input_image.meta, fibermap=fibermap) - - if outfile is not None: #- writing to a frame file if needed. 
- io.write_frame(outfile,frame) - log.debug("wrote frame output file %s"%outfile) - - return frame - - -class ComputeFiberflat(pas.PipelineAlg): - """ PA to compute fiberflat field correction from a DESI continuum lamp frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ComputeFiberflat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - input_frame=args[0] #- frame object to calculate fiberflat from - if "outputFile" not in kwargs: - #raise qlexceptions.ParameterException("Need output file name to write fiberflat File") - log.critical("Need output file name to write fiberflat File") - sys.exit() - - outputfile=kwargs["outputFile"] - - return self.run_pa(input_frame,outputfile) - - def run_pa(self,input_frame,outputfile): - from desispec.fiberflat import compute_fiberflat - import desispec.io.fiberflat as ffIO - fiberflat=compute_fiberflat(input_frame) - ffIO.write_fiberflat(outputfile,fiberflat,header=input_frame.meta) - log.debug("Fiberflat file written. Exiting Quicklook for this configuration") #- File written, no need to go further - # !!!!! SAMI to whoever wrote this - # PA's or any other components *CANNOT* call sys.exit()!! this needs to be fixed!!!!! - sys.exit(0) - -class ComputeFiberflat_QL(pas.PipelineAlg): - """ PA to compute fiberflat field correction from a DESI continuum lamp frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ComputeFiberflat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - input_frame=args[0] #- frame object to calculate fiberflat from - if "outputFile" not in kwargs: - #raise qlexceptions.ParameterException("Need output file name to write fiberflat File") - log.critical("Need output file name to write fiberflat File") - sys.exit() - - outputfile=kwargs["outputFile"] - - return self.run_pa(input_frame,outputfile) - - def run_pa(self,frame,outputfile): - from desispec.fiberflat import FiberFlat - import desispec.io.fiberflat as ffIO - from desispec.linalg import cholesky_solve - nwave=frame.nwave - nfibers=frame.nspec - wave = frame.wave #- this will become part of output too - flux = frame.flux - sumFlux=np.zeros((nwave)) - realFlux=np.zeros(flux.shape) - ivar = frame.ivar*(frame.mask==0) - #deconv - for fib in range(nfibers): - Rf=frame.R[fib].todense() - B=flux[fib] - try: - realFlux[fib]=cholesky_solve(Rf,B) - except: - log.warning("cholesky_solve failed for fiber {}, using numpy.linalg.solve instead.".format(fib)) - realFlux[fib]=np.linalg.solve(Rf,B) - sumFlux+=realFlux[fib] - #iflux=nfibers/sumFlux - flat = np.zeros(flux.shape) - flat_ivar=np.zeros(ivar.shape) - avg=sumFlux/nfibers - for fib in range(nfibers): - Rf=frame.R[fib] - # apply and reconvolute - M=Rf.dot(avg) - M0=(M==0) - flat[fib]=(~M0)*flux[fib]/(M+M0) +M0 - flat_ivar[fib]=ivar[fib]*M**2 - fibflat=FiberFlat(frame.wave.copy(),flat,flat_ivar,frame.mask.copy(),avg) - - #fiberflat=compute_fiberflat(input_frame) - ffIO.write_fiberflat(outputfile,fibflat,header=frame.meta) - log.info("Wrote fiberflat file {}".format(outputfile)) - - fflatfile = ffIO.read_fiberflat(outputfile) - - return fflatfile - -class ApplyFiberFlat(pas.PipelineAlg): - """ - PA to Apply the fiberflat field to the given frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ApplyFiberFlat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "FiberFlatFile" not in kwargs: - #raise qlexceptions.ParameterException("Need Fiberflat file") - log.critical("Need Fiberflat file") - sys.exit() - - input_frame=args[0] - - - fiberflat=kwargs["FiberFlatFile"] - - return self.run_pa(input_frame,fiberflat) - - def run_pa(self,input_frame,fiberflat): - - from desispec.fiberflat import apply_fiberflat - apply_fiberflat(input_frame,fiberflat) - return input_frame - -class ApplyFiberFlat_QL(pas.PipelineAlg): - """ - PA to Apply the fiberflat field (QL) to the given frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Apply FiberFlat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "FiberFlatFile" not in kwargs: - #raise qlexceptions.ParameterException("Need Fiberflat file") - log.critical("Need Fiberflat file") - sys.exit() - - - input_frame=args[0] - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - fiberflat=kwargs["FiberFlatFile"] - - return self.run_pa(input_frame,fiberflat,dumpfile=dumpfile) - - def run_pa(self,input_frame,fiberflat,dumpfile=None): - - from desispec.quicklook.quickfiberflat import apply_fiberflat - fframe=apply_fiberflat(input_frame,fiberflat) - - if dumpfile is not None: - night = fframe.meta['NIGHT'] - expid = fframe.meta['EXPID'] - io.write_frame(dumpfile, fframe) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - return fframe - - -class ComputeSky(pas.PipelineAlg): - """ PA to compute sky model from a DESI frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ComputeSky" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "FiberFlatFile" not in kwargs: #- need this as fiberflat has to apply to frame first - #raise qlexceptions.ParameterException("Need Fiberflat frame file") - log.critical("Need Fiberflat frame file!") - sys.exit() - - input_frame=args[0] #- frame object to calculate sky from - if "FiberMap" in kwargs: - fibermap=kwargs["FiberMap"] - if "Outfile" not in kwargs: - #raise qlexceptions.ParameterException("Need output file name to write skymodel") - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - fiberflat=kwargs["FiberFlatFile"] - outputfile=kwargs["Outfile"] - return self.run_pa(input_frame,fiberflat,outputfile) - - def run_pa(self,input_frame,fiberflat,outputfile): - from desispec.fiberflat import apply_fiberflat - from desispec.sky import compute_sky - from desispec.io.sky import write_sky - - #- First apply fiberflat to sky fibers - apply_fiberflat(input_frame,fiberflat) - - #- calculate the model - skymodel=compute_sky(input_frame) - write_sky(outputfile,skymodel,input_frame.meta) - log.debug("Sky Model file wrtten. Exiting pipeline for this configuration") - sys.exit(0) - -class ComputeSky_QL(pas.PipelineAlg): - """ PA to compute sky model from a DESI frame - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ComputeSky_QL" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - input_frame=args[0] #- frame object to calculate sky from. Should be fiber flat corrected - if "FiberMap" in kwargs: - fibermap=kwargs["FiberMap"] - else: fibermap=None - - if "Apply_resolution" in kwargs: - apply_resolution=kwargs["Apply_resolution"] - - if "Outfile" not in kwargs: - #raise qlexceptions.ParameterException("Need output file name to write skymodel") - log.critical("Need output file name to write skymodel!") - sys.exit() - - outputfile=kwargs["Outfile"] - return self.run_pa(input_frame,outputfile,fibermap=fibermap,apply_resolution=apply_resolution) - - def run_pa(self,input_frame,outputfile,fibermap=None,apply_resolution=False): #- input frame should be already fiberflat fielded - from desispec.io.sky import write_sky - from desispec.quicklook.quicksky import compute_sky - - skymodel=compute_sky(input_frame,fibermap,apply_resolution=apply_resolution) - - write_sky(outputfile,skymodel,input_frame.meta) - # SEE ABOVE COMMENT!!!! - log.debug("Sky Model file wrtten. Exiting the pipeline for this configuration") - sys.exit(0) - -class SkySub(pas.PipelineAlg): - - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SkySub" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "SkyFile" not in kwargs: - #raise qlexceptions.ParameterException("Need Skymodel file") - log.critical("Need Skymodel file!") - sys.exit() - - - input_frame=args[0] #- this must be flat field applied before sky subtraction in the pipeline - skyfile=kwargs["SkyFile"] #- Read sky model file itself from an argument - from desispec.io.sky import read_sky - skymodel=read_sky(skyfile) - - return self.run_pa(input_frame,skymodel) - - def run_pa(self,input_frame,skymodel): - from desispec.sky import subtract_sky - subtract_sky(input_frame,skymodel) - return (input_frame, skymodel) - -class SkySub_QL(pas.PipelineAlg): - """ - This is for QL Sky subtraction. The input frame object should be fiber flat corrected. - Unlike offline, if no skymodel file is given as input, a sky compute method is called - to create a skymodel object and then subtraction is performed. Outputing that skymodel - to a file is optional and can be configured. - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SkySub_QL" - pas.PipelineAlg.__init__(self,name,fr,type(tuple),config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - input_frame=args[0] #- this must be flat field applied before sky subtraction in the pipeline - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - if "SkyFile" in kwargs: - from desispec.io.sky import read_sky - skyfile=kwargs["SkyFile"] #- Read sky model file itself from an argument - log.debug("Using given sky file %s for subtraction"%skyfile) - - skymodel=read_sky(skyfile) - - else: - if "Outskyfile" in kwargs: - outskyfile=kwargs["Outskyfile"] - else: outskyfile=None - - log.debug("No sky file given. Computing sky first") - from desispec.quicklook.quicksky import compute_sky - if "Apply_resolution" in kwargs: - apply_resolution=kwargs["Apply_resolution"] - log.debug("Apply fiber to fiber resolution variation in computing sky") - else: apply_resolution = False - fibermap=input_frame.fibermap - skymodel=compute_sky(input_frame,fibermap,apply_resolution=apply_resolution) - if outskyfile is not None: - from desispec.io.sky import write_sky - log.debug("writing an output sky model file %s "%outskyfile) - write_sky(outskyfile,skymodel,input_frame.meta) - - #- now do the subtraction - return self.run_pa(input_frame,skymodel,dumpfile=dumpfile) - - def run_pa(self,input_frame,skymodel,dumpfile=None): - from desispec.quicklook.quicksky import subtract_sky - sframe=subtract_sky(input_frame,skymodel) - - if dumpfile is not None: - night = sframe.meta['NIGHT'] - expid = sframe.meta['EXPID'] - io.write_frame(dumpfile, sframe) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - return (sframe,skymodel) - -class ApplyFluxCalibration(pas.PipelineAlg): - """PA to apply flux calibration to the given sframe - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Apply Flux Calibration" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0][0])): - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0][0]))) - - input_frame=args[0][0] - - if "outputfile" in kwargs: - outputfile=kwargs["outputfile"] - else: - log.critical("Must provide output file to write cframe") - sys.exit() - - return self.run_pa(input_frame,outputfile=outputfile) - - def run_pa(self,frame,outputfile=None): - night=frame.meta['NIGHT'] - camera=frame.meta['CAMERA'] - expid=frame.meta['EXPID'] - - rawfile=findfile('raw',night,expid,rawdata_dir=os.environ['QL_SPEC_DATA']) - rawfits=fits.open(rawfile) - primary_header=rawfits[0].header - image=read_raw(rawfile,camera) - - fluxcalib_filename=findcalibfile([image.meta,primary_header],"FLUXCALIB") - - fluxcalib = read_average_flux_calibration(fluxcalib_filename) - log.info("read average calib in {}".format(fluxcalib_filename)) - seeing = frame.meta["SEEING"] - airmass = frame.meta["AIRMASS"] - exptime = frame.meta["EXPTIME"] - exposure_calib = fluxcalib.value(seeing=seeing,airmass=airmass) - for q in range(frame.nspec) : - fiber_calib=np.interp(frame.wave[q],fluxcalib.wave,exposure_calib)*exptime - inv_calib = (fiber_calib>0)/(fiber_calib + (fiber_calib==0)) - frame.flux[q] *= inv_calib - frame.ivar[q] *= fiber_calib**2*(fiber_calib>0) - - write_qframe(outputfile,frame) - log.info("Wrote flux calibrated frame file %s after %s"%(outputfile,self.name)) - - return frame - -class ResolutionFit(pas.PipelineAlg): - - """ - Fitting of Arc lines on extracted arc spectra, polynomial expansion of the fitted sigmas, and updating - the coefficients to the new traceset file - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ResolutionFit" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "PSFoutfile" not in kwargs: - #raise qlexceptions.ParameterException("Missing psfoutfile in the arguments") - log.critical("Missing psfoutfile in the arguments!") - sys.exit() - - psfoutfile=kwargs["PSFoutfile"] - psfinfile=kwargs["PSFinputfile"] - - if "usesigma" in kwargs: - usesigma=kwargs["usesigma"] - else: usesigma = False - - tset = read_xytraceset(psfinfile) - domain=(tset.wavemin,tset.wavemax) - - input_frame=args[0] - - linelist=None - if "Linelist" in kwargs: - linelist=kwargs["Linelist"] - - npoly=2 - if "NPOLY" in kwargs: - npoly=kwargs["NPOLY"] - nbins=2 - if "NBINS" in kwargs: - nbins=kwargs["NBINS"] - - return self.run_pa(input_frame,psfinfile,psfoutfile,usesigma,linelist=linelist,npoly=npoly,nbins=nbins,domain=domain) - - def run_pa(self,input_frame,psfinfile,outfile,usesigma,linelist=None,npoly=2,nbins=2,domain=None): - from desispec.quicklook.arcprocess import process_arc,write_psffile - from desispec.quicklook.palib import get_resolution - - wcoeffs,wavemin,wavemax =process_arc(input_frame,linelist=linelist,npoly=npoly,nbins=nbins,domain=domain) - write_psffile(psfinfile,wcoeffs,wavemin,wavemax,outfile) - log.debug("Wrote xytraceset file {}".format(outfile)) - - #- update the arc frame resolution from new coeffs - tset = read_xytraceset(outfile) - input_frame.resolution_data=get_resolution(input_frame.wave,input_frame.nspec,tset,usesigma=usesigma) - - return (tset,input_frame) - - -# ======================= -# qproc algorithms -# ======================= - -from desispec.sky import SkyModel -from desispec.qproc.io import write_qframe -from desispec.qproc.qextract import qproc_boxcar_extraction -from desispec.qproc.qfiberflat import qproc_apply_fiberflat -from desispec.qproc.qsky import qproc_sky_subtraction - -class Extract_QP(pas.PipelineAlg): - - - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Extract_QP" - pas.PipelineAlg.__init__(self,name,im,fr,config,logger) - - def run(self,*args,**kwargs): - - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "PSFFile" not in kwargs: - #raise qlexceptions.ParameterException("Need PSF File") - log.critical("Need PSF file!") - sys.exit() - - input_image=args[0] - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - psf_filename=kwargs["PSFFile"] - print("psf_filename=",psf_filename) - - traceset = read_xytraceset(psf_filename) - - width=kwargs["FullWidth"] - nspec=kwargs["Nspec"] - if "Wavelength" not in kwargs: - wstart = np.ceil(traceset.wavemin) - wstop = np.floor(traceset.wavemax) - dw = 0.5 - else: - wavelength=kwargs["Wavelength"] - print('kwargs["Wavelength"]=',kwargs["Wavelength"]) - if kwargs["Wavelength"] is not None: #- should be in wstart,wstop,dw format - wstart, wstop, dw = [float(w) for w in wavelength] - else: - wstart = np.ceil(traceset.wmin) - wstop = np.floor(traceset.wmax) - dw = 0.5 - wave = np.arange(wstart, wstop+dw/2.0, dw) - if "Specmin" not in kwargs: - specmin=0 - else: - specmin=kwargs["Specmin"] - if kwargs["Specmin"] is None: - specmin=0 - - if "Nspec" not in kwargs: - nspec = traceset.nspec - else: - nspec=kwargs["Nspec"] - if nspec is None: - nspec=traceset.nspec - - specmax = specmin + nspec - - camera = input_image.meta['CAMERA'].lower() #- b0, r1, .. z9 - spectrograph = int(camera[1]) - fibermin = spectrograph*500 + specmin - if "FiberMap" not in kwargs: - fibermap = None - fibers = np.arange(fibermin, fibermin+nspec, dtype='i4') - else: - fibermap=kwargs["FiberMap"] - fibermap = fibermap[fibermin:fibermin+nspec] - fibers = fibermap['FIBER'] - if "Outfile" in kwargs: - outfile=kwargs["Outfile"] - else: - outfile=None - maskFile=None - if "MaskFile" in kwargs: - maskFile=kwargs['MaskFile'] - - #- Add some header keys relevant for this extraction - input_image.meta['NSPEC'] = (nspec, 'Number of spectra') - input_image.meta['WAVEMIN'] = (wstart, 'First wavelength [Angstroms]') - input_image.meta['WAVEMAX'] = (wstop, 'Last wavelength [Angstroms]') - input_image.meta['WAVESTEP']= (dw, 'Wavelength step size [Angstroms]') - - - - return self.run_pa(input_image,traceset,wave,width,nspec, - fibers=fibers,fibermap=fibermap,dumpfile=dumpfile, - maskFile=maskFile) - - def run_pa(self,input_image,traceset,outwave,width,nspec, - fibers=None,fibermap=None,dumpfile=None, - maskFile=None): - - qframe = qproc_boxcar_extraction(traceset,input_image,fibers=fibers, width=width, fibermap=fibermap) - - if dumpfile is not None: - write_qframe(dumpfile, qframe, fibermap=fibermap) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - return qframe - - - def get_default_config(self): - return {("FullWidth",7,"Boxcar full width"), - ("PSFFile","%%PSFFile","PSFFile to use"), - ("DeltaW",0.5,"Binwidth of extrapolated wavelength array"), - ("Nspec",500,"number of spectra to extract") - } - -class ComputeFiberflat_QP(pas.PipelineAlg): - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="ComputeFiberflat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - raise qlexceptions.ParameterException("Missing input parameter") - if not self.is_compatible(type(args[0])): - raise qlexceptions.ParameterException("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - input_frame=args[0] #- frame object to calculate fiberflat from - if "outputFile" not in kwargs: - raise qlexceptions.ParameterException("Need output file name to write fiberflat File") - outputfile=kwargs["outputFile"] - - return self.run_pa(input_frame,outputfile) - - def run_pa(self,qframe,outputfile): - from desispec.qproc.qfiberflat import qproc_compute_fiberflat - import desispec.io.fiberflat as ffIO - - fibflat=qproc_compute_fiberflat(qframe) - - ffIO.write_fiberflat(outputfile,fibflat,header=qframe.meta) - log.info("Wrote fiberflat file {}".format(outputfile)) - - fflatfile = ffIO.read_fiberflat(outputfile) - - return fflatfile - -class ApplyFiberFlat_QP(pas.PipelineAlg): - """ - PA to Apply the fiberflat field (QP) to the given qframe - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="Apply FiberFlat" - pas.PipelineAlg.__init__(self,name,fr,fr,config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - if "FiberFlatFile" not in kwargs: - #raise qlexceptions.ParameterException("Need Fiberflat file") - log.critical("Need Fiberflat file!") - sys.exit() - - input_qframe=args[0] - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - fiberflat=kwargs["FiberFlatFile"] - - return self.run_pa(input_qframe,fiberflat,dumpfile=dumpfile) - - def run_pa(self,qframe,fiberflat,dumpfile=None): - - - qproc_apply_fiberflat(qframe,fiberflat) - - if dumpfile is not None: - night = qframe.meta['NIGHT'] - expid = qframe.meta['EXPID'] - write_qframe(dumpfile, qframe) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - return qframe - -class SkySub_QP(pas.PipelineAlg): - """ - Sky subtraction. The input frame object should be fiber flat corrected. - No sky model is saved for now - """ - def __init__(self,name,config,logger=None): - if name is None or name.strip() == "": - name="SkySub_QP" - pas.PipelineAlg.__init__(self,name,fr,type(tuple),config,logger) - - def run(self,*args,**kwargs): - if len(args) == 0 : - #raise qlexceptions.ParameterException("Missing input parameter") - log.critical("Missing input parameter!") - sys.exit() - if not self.is_compatible(type(args[0])): - #raise qlexceptions.ParameterException("Incompatible input. Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - log.critical("Incompatible input!") - sys.exit("Incompatible input. 
Was expecting %s got %s"%(type(self.__inpType__),type(args[0]))) - - input_qframe=args[0] #- this must be flat field applied before sky subtraction in the pipeline - - dumpfile=None - if "dumpfile" in kwargs: - dumpfile=kwargs["dumpfile"] - - #- now do the subtraction - return self.run_pa(input_qframe,dumpfile=dumpfile) - - def run_pa(self,qframe,dumpfile=None): - - skymodel = qproc_sky_subtraction(qframe,return_skymodel=True) - #qproc_sky_subtraction(qframe) - - if dumpfile is not None: - night = qframe.meta['NIGHT'] - expid = qframe.meta['EXPID'] - write_qframe(dumpfile, qframe) - log.debug("Wrote intermediate file %s after %s"%(dumpfile,self.name)) - - # convert for QA -# sframe=qframe.asframe() -# tmpsky=np.interp(sframe.wave,qframe.wave[0],skymodel[0]) -# skymodel = SkyModel(sframe.wave,np.tile(tmpsky,(sframe.nspec,1)),np.ones(sframe.flux.shape),np.zeros(sframe.flux.shape,dtype="int32")) - - return (qframe,skymodel) diff --git a/deprecated/py/desispec/quicklook/qas.py b/deprecated/py/desispec/quicklook/qas.py deleted file mode 100644 index a4b07852e..000000000 --- a/deprecated/py/desispec/quicklook/qas.py +++ /dev/null @@ -1,307 +0,0 @@ -""" -desispec.quicklook.qas -====================== - -""" -from desispec.quicklook import qllogger -from desispec.quicklook import qlexceptions -import collections -import numpy as np -from enum import Enum -from astropy.io import fits - - -class QASeverity(Enum): - ALARM=30 - WARNING=20 - NORMAL=0 - -class MonitoringAlg: - """ Simple base class for monitoring algorithms """ - def __init__(self,name,inptype,config,logger=None): - if logger is None: - self.m_log=qllogger.QLLogger().getlog(name) - else: - self.m_log=logger - self.__inpType__=type(inptype) - self.name=name - self.config=config - self.__deviation = None - self.m_log.debug("initializing Monitoring alg {}".format(name)) - - def __call__(self,*args,**kwargs): - res=self.run(*args,**kwargs) - cargs=self.config['kwargs'] - params=cargs['param'] - - metrics=res["METRICS"] if 'METRICS' in res else None - if metrics is None: - metrics={} - res["METRICS"]=metrics - - reskey="RESULT" - QARESULTKEY="QA_STATUS" - if res['FLAVOR'] == 'science': - REFNAME = cargs["RESULTKEY"]+'_'+format(res['PROGRAM']).upper()+'_REF' # SE: get the REF name from cargs - else: - REFNAME = cargs["RESULTKEY"]+'_REF' - - NORM_range = cargs["RESULTKEY"]+'_NORMAL_RANGE' - WARN_range = cargs["RESULTKEY"]+'_WARN_RANGE' - norm_range_val = [0,0] - warn_range_val = [0,0] - - if "QASTATUSKEY" in cargs: - QARESULTKEY=cargs["QASTATUSKEY"] - if "RESULTKEY" in cargs: - reskey=cargs["RESULTKEY"] - - if cargs["RESULTKEY"] == 'CHECKHDUS': - stats=[] - stats.append(metrics['CHECKHDUS_STATUS']) - stats.append(metrics['EXPNUM_STATUS']) - if np.isin(stats,'NORMAL').all(): - metrics[QARESULTKEY]='NORMAL' - elif np.isin(stats,'ALARM').any(): - metrics[QARESULTKEY] = 'ALARM' - - self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - - if reskey in metrics: - current = metrics[reskey] - - #SE: Replacing this chunk (between the dashed lines) with an alternative that accomodates receiving the REF keys from the configuration ----------------------------------------------------------------------------------------------------------------- - #if "REFERENCE" in cargs: - - #refval=cargs["REFERENCE"] - -## print(refval,"MA inside if") - - #else: #- For absolute value checks - #self.m_log.warning("No reference given. STATUS will be assigned for the Absolute Value. 
Confirm your ranges.") - ##- check the data type - #if isinstance(current,float) or isinstance(current,np.float32) or isinstance(current,int): - #refval=0 - #else: - #refval=np.zeros(len(current)) #- 1D list or array - ##- Update PARAMS ref key - #res["PARAMS"][reskey+'_REF']=refval - - #currlist=isinstance(current,(np.ndarray,collections.Sequence)) - #reflist=isinstance(refval,(np.ndarray,collections.Sequence)) - #if currlist != reflist: # different types - #self.m_log.critical("QL {} : REFERENCE({}) and RESULT({}) are of different types!".format(self.name,type(refval),type(current))) - #elif currlist: #both are lists - #if len(refval)==len(current): - #self.__deviation=[c-r for c,r in zip(current,refval)] - #else: - #self.m_log.critical("QL {} : REFERENCE({}) and RESULT({}) are of different length!".format(self.name,len(refval),len(current))) - #else: # both are scalars - #self.__deviation=sorted(current)-sorted(refval) - - ## check RANGES given in config and set QA_STATUS keyword - ## it should be a sorted overlapping list of range tuples in the form [ ((interval),QASeverity),((-1.0,1.0),QASeverity.NORMAL),(-2.0,2.0),QAStatus.WARNING)] - ## for multiple results, thresholds should be a list of lists as given above (one range list per result) - ## intervals should be non overlapping. - ## lower bound is inclusive upper bound is exclusive - ## first matching interval will be used - ## if no interval contains the deviation, it will be set to QASeverity.ALARM - ## if RANGES or REFERENCE are not given in config, QA_STATUS will be set to UNKNOWN - #def findThr(d,t): - #val=QASeverity.ALARM - #for l in list(t): - #if d>=l[0][0] and d 1D array - #metrics[QARESULTKEY]=findThr(np.array(self.__deviation).flatten()[kk],thr) - ##metrics[QARESULTKEY]=[findThr(d,thr) for d in self.__deviation] - ##else: # each result has its own thresholds - ## metrics[QARESULTKEY]=[str(findThr(d,t)) for d,t in zip(self.__deviation,thr)] - - #else: #result is a scalar - #metrics[QARESULTKEY]=findThr(self.__deviation,thr) - #if metrics[QARESULTKEY]==QASeverity.NORMAL: - #metrics[QARESULTKEY]='NORMAL' - #elif metrics[QARESULTKEY]==QASeverity.WARNING: - #metrics[QARESULTKEY]='WARNING' - #else: - #metrics[QARESULTKEY]='ALARM' - #else: - #self.m_log.warning("No Reference checking for QA {}".format(self.name)) - - #self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - #return res - #def run(self,*argv,**kwargs): - #pass - #def is_compatible(self,Type): - #return isinstance(Type,self.__inpType__) - #def check_reference(): - #return self.__deviation - #def get_default_config(self): - #""" return a dictionary of 3-tuples, - #field 0 is the name of the parameter - #field 1 is the default value of the parameter - #field 2 is the comment for human readable format. - #Field 2 can be used for QLF to dynamically setup the display""" - #return None - #---------------------------------------------------------------------------------------------------------- - - if REFNAME in params: #SE: get the REF value/ranges from params - - refval=params[REFNAME] - - if len(refval) ==1: - refval = refval[0] - - refval = np.asarray(refval) - current = np.asarray(current) - norm_range_val=params[NORM_range] - warn_range_val=params[WARN_range] - - #SE: just in case any nan value sneaks in the array of the scalar metrics - ind = np.argwhere(np.isnan(current)) - - if (ind.shape[0] > 0 and refval.shape[0] == current.shape[0]): - self.m_log.critical("QL {} : elements({}) of the result are returned as NaN! 
STATUS is determined for the real values".format(self.name,str(ind))) - - ind = list(np.hstack(ind)) - for index in sorted(ind, reverse=True): - del current[index] - del refval[index] - - else: - self.m_log.warning("No reference given. Update the configuration file to include reference value for QA: {}".format(self.name)) - - currlist=isinstance(current,(np.ndarray,collections.Sequence)) - reflist=isinstance(refval,(np.ndarray,collections.Sequence)) - - if currlist != reflist: - self.m_log.critical("QL {} : REFERENCE({}) and RESULT({}) are of different types!".format(self.name,type(refval),type(current))) - elif currlist: - - if refval.size == current.size and current.size >1: - - self.__deviation=[c-r for c,r in zip(np.sort(current),np.sort(refval))] - elif refval.size == current.size and current.size and current.size == 1: - self.__deviation = current - refval - elif np.size(current) == 0 or np.size(refval) == 0: - self.m_log.warning("No measurement is done or no reference is available for this QA!- check the configuration file for references!") - metrics[QARESULTKEY]='UNKNOWN' - self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - elif refval.size != current.size: - self.m_log.critical("QL {} : REFERENCE({}) and RESULT({}) are of different length!".format(self.name,refval.size,current.size)) - metrics[QARESULTKEY]='UNKNOWN' - self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - - else: - #SE "sorting" eliminate the chance of randomly shuffling items in the list that we observed in the past - self.__deviation=(np.sort(current)-np.sort(refval))/np.sort(current) - - def findThr(d,t): - if d != None and len(list(t)) >1: - val=QASeverity.ALARM - for l in list(t): - - if d>=l[0][0] and d=l and d= 2: - stats.append('ALARM') - else: - for i,val in enumerate(devlist): - if len(nofit) != 0 and i == nofit[0]: - stats.append('NORMAL') - else: - diff = refval[i] - val - if thr[0]<= diff <= thr[1]: - stats.append('NORMAL') - elif wthr[0] <= diff <= wthr[1]: - stats.append('WARNING') - else: - stats.append('ALARM') - - if np.isin(stats,'NORMAL').all(): - metrics[QARESULTKEY]='NORMAL' - elif np.isin(stats,'WARNING').any() and np.isin(stats,'ALARM').any(): - metrics[QARESULTKEY] = 'ALARM' - elif np.isin(stats,'ALARM').any(): - metrics[QARESULTKEY] = 'ALARM' - elif np.isin(stats,'WARNING').any(): - metrics[QARESULTKEY] = 'WARNING' - - self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - - elif (len(thr)==2 and len(wthr)==2): - - if np.size(devlist)== 1: - d=[] - d.append(devlist) - devlist = d - stats = [] - for val in devlist: - if thr[0] <= val <= thr[1]: - stats.append('NORMAL') - elif wthr[0] <= val <= wthr[1]: - stats.append('WARNING') - else: - stats.append('ALARM') - - if np.isin(stats,'NORMAL').all(): - metrics[QARESULTKEY]='NORMAL' - elif np.isin(stats,'WARNING').any() and np.isin(stats,'ALARM').any(): - metrics[QARESULTKEY] = 'ALARM' - elif np.isin(stats,'ALARM').any(): - metrics[QARESULTKEY] = 'ALARM' - elif np.isin(stats,'WARNING').any(): - metrics[QARESULTKEY] = 'WARNING' - self.m_log.info("{}: {}".format(QARESULTKEY,metrics[QARESULTKEY])) - - return res - - def run(self,*argv,**kwargs): - pass - def is_compatible(self,Type): - return isinstance(Type,self.__inpType__) - def check_reference(): - return self.__deviation - def get_default_config(self): - return None diff --git a/deprecated/py/desispec/quicklook/ql_plotlib.py b/deprecated/py/desispec/quicklook/ql_plotlib.py deleted file mode 100644 index dee5a4fc0..000000000 --- 
a/deprecated/py/desispec/quicklook/ql_plotlib.py +++ /dev/null @@ -1,194 +0,0 @@ -""" -desispec.quicklook.ql_plotlib -============================= - -Generic plotting algorithms for QuickLook QAs. -""" -import numpy as np -import matplotlib.pyplot as plt - -def ql_qaplot(fig,plotconf,qadict,camera,expid,outfile): - """ - Get plotting configuration info and setup plots - - Args: - fig: matplotlib figure - plotconf: list of config info for each plot - qadict: QA metrics dictionary - camera, expid: to be used in output png title - outfile: output png file - - Returns: - hardplots flag: False if plots were generated from the plot config (and written to the output png), True if hard coded plots should be used instead - """ - #- Find relevant plots in plotting configuration file - plotconfig=[] - for page in plotconf: - for plot in plotconf[page]: - if plot != 'Title': - for key in plotconf[page][plot]: - met=None - if key == 'VALS' and plotconf[page][plot]['TYPE'] == 'PATCH': - met=str(plotconf[page][plot][key]) - elif key == 'YVALS' and plotconf[page][plot]['TYPE'] == '2DPLOT': - met=str(plotconf[page][plot][key]) - elif key == 'ZVALS' and plotconf[page][plot]['TYPE'] == '3DPLOT': - met=str(plotconf[page][plot][key]) - if met and met in qadict["METRICS"]: - title=plotconf[page]['Title'] - plotconfig.append(plotconf[page][plot]) - - hardplots=False - if len(plotconfig) != 0: - #- Setup patch plot - plt.suptitle("{}, Camera: {}, Expid: {}".format(title,camera,expid),fontsize=10) - - #- Loop through all plots in configuration file - nplots=len(plotconfig) - nrow=ncol=int(np.ceil(np.sqrt(len(plotconfig)))) - for p in range(nplots): - #- Grab necessary plot config info - plot=plotconfig[p] - plottype=plot['TYPE'] - plottitle=plot['PLOT_TITLE'] - #- Optional plot config inputs - heatmap=None - if 'HEAT' in plot: - heatmap=plot['HEAT'] - xlim=None - if 'XRANGE' in plot: - xlim=plot['XRANGE'] - ylim=None - if 'YRANGE' in plot: - ylim=plot['YRANGE'] - zlim=None - if 'ZRANGE' in plot: - zlim=plot['ZRANGE'] - - #- Generate subplots - ax=fig.add_subplot('{}{}{}'.format(nrow,ncol,p+1)) - if plottype == 'PATCH': - vals=np.array(qadict['METRICS'][plot['VALS']]) - grid=plot['GRID'] - patch=ql_patchplot(ax,vals,plottitle,grid,heatmap) - fig.colorbar(patch) - if plottype == '2DPLOT': - xvals=np.array(qadict['METRICS'][plot['XVALS']]) - yvals=np.array(qadict['METRICS'][plot['YVALS']]) - xtitle=plot['XTITLE'] - ytitle=plot['YTITLE'] - ql_2dplot(ax,xvals,yvals,plottitle,xtitle,ytitle,xlim,ylim) - if plottype == '3DPLOT': - xvals=np.array(qadict['METRICS'][plot['XVALS']]) - yvals=np.array(qadict['METRICS'][plot['YVALS']]) - zvals=np.array(qadict['METRICS'][plot['ZVALS']]) - xtitle=plot['XTITLE'] - ytitle=plot['YTITLE'] - scatter=ql_3dplot(ax,xvals,yvals,zvals,plottitle,xtitle,ytitle,zlim,heatmap) - fig.colorbar(scatter) - - #- Adjust plots to fit page and output png - plt.tight_layout() - plt.subplots_adjust(top=0.88) - fig.savefig(outfile) - - #- If QA not in plot config, use hard coded plots - else: - hardplots=True - - return hardplots - -def ql_patchplot(ax,vals,plottitle,grid,heatmap=None): - """ - Make patch plot of specific metrics provided in configuration file - - Args: - ax: matplotlib subplot - vals: QA metric to be plotted - plottitle: plot title from configuration file - grid: shape of patch plot - Optional: - heatmap: specify color of heatmap (must conform to matplotlib) - - Returns: - matplotlib subplot containing plotted metrics - """ - #- Setup title and tick parameters - ax.set_title(plottitle,fontsize=10) - ax.tick_params(axis='x',labelsize=10,labelbottom=False) - 
ax.tick_params(axis='y',labelsize=10,labelleft=False) - - #- Add optional arguments - if heatmap: cmap = heatmap - else: cmap = 'OrRd' - - #- Generate patch plot - patch=ax.pcolor(vals.reshape(grid[0],grid[1]),cmap=cmap) - - return patch - -def ql_2dplot(ax,xvals,yvals,plottitle,xtitle,ytitle,xlim=None,ylim=None): - """ - Make 2d plot of specific metrics provided in configuration file - - Args: - ax: matplotlib subplot - xvals: QA metric to be plotted along the xaxis - yvals: QA metric to be plotted along the yaxis - plottitle: plot title from configuration file - xtitle: x axis label - ytitle: y axis label - Optional: - xlim: list containing x range (i.e. [x_lo,x_hi]) - ylim: list containing y range (i.e. [y_lo,y_hi]) - - Returns: - matplotlib subplot containing plotted metrics - """ - #- Set title and axis labels - ax.set_title(plottitle,fontsize=10) - ax.set_xlabel(xtitle,fontsize=10) - ax.set_ylabel(ytitle,fontsize=10) - - #- Add optional arguments - if xlim: ax.set_xlim(xlim[0],xlim[1]) - if ylim: ax.set_ylim(ylim[0],ylim[1]) - - #- Generate 2d plot - ax.plot(xvals,yvals) - - return ax - -def ql_3dplot(ax,xvals,yvals,zvals,plottitle,xtitle,ytitle,zlim=None,heatmap=None): - """ - Make 3d scatter plot of specific metrics provided in configuration file - - Args: - ax: matplotlib subplot - xvals: QA metric to be plotted along the xaxis - yvals: QA metric to be plotted along the yaxis - zvals: QA metric to be plotted - plottitle: plot title from configuration file - xtitle: x axis label - ytitle: y axis label - Optional: - zlim: list containing scatter plot range (i.e. [z_lo,z_hi]) - - Returns: - matplotlib subplot containing plotted metrics - """ - #- Setup title and axes labels - ax.set_title(plottitle,fontsize=10) - ax.set_xlabel(xtitle,fontsize=10) - ax.set_ylabel(ytitle,fontsize=10) - - #- Add optional arguments - if heatmap: cmap = heatmap - else: cmap = 'bwr' - if zlim: vmin,vmax = zlim[0],zlim[1] - else: vmin,vmax = np.min(zvals), np.max(zvals) - - #- Generate 3d scatter plot - scatter=ax.scatter(xvals,yvals,c=zvals,cmap=cmap,vmin=vmin,vmax=vmax) - - return scatter diff --git a/deprecated/py/desispec/quicklook/qlboxcar.py b/deprecated/py/desispec/quicklook/qlboxcar.py deleted file mode 100644 index ba180566c..000000000 --- a/deprecated/py/desispec/quicklook/qlboxcar.py +++ /dev/null @@ -1,138 +0,0 @@ -""" -desispec.quicklook.qlboxcar -=========================== - -Boxcar extraction for spectra from a DESI image. -""" -from __future__ import absolute_import, division, print_function -import numpy as np -from desispec.quicklook.palib import resample_spec,get_resolution - -def do_boxcar(image,tset,outwave,boxwidth=2.5,nspec=500,maskFile=None,usesigma=False, - quick_resolution=False): - """Extracts spectra row by row, given the centroids - - Args: - image : desispec.image object - tset: desispec.xytraceset like object - outwave: wavelength array for the final spectra output - boxwidth: box half-width in pixels - usesigma: if True, use sigma from psf file (ysigma) to calculate resolution data. - quick_resolution: whether to calculate the resolution matrix or use QuickResolution object - Returns flux, ivar, resolution - """ - import math - from desispec.frame import Frame - - #wavelength=psf.wavelength() # (nspec,npix_y) - def calcMask(tset): - wmin=tset.wavemin - wmax=tset.wavemax - waves=np.arange(wmin,wmax,0.25) - xs=tset.x_vs_wave(np.arange(tset.nspec),waves) #- xtraces # doing the full image here. 
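#- [Editor's aside, not part of the original file] A minimal sketch of the
#- fractional edge-weight bookkeeping (rxmin/rxmax) used by the mask built
#- below: the first and last CCD columns of each +/-boxwidth window get
#- partial weights so that the weights sum to the full box width. The
#- helper function and numbers here are hypothetical illustrations only.
#
#    import math
#    def box_weights(xpos, boxwidth=2.5):
#        xmin, xmax = xpos - boxwidth, xpos + boxwidth
#        ixmin, ixmax = int(math.floor(xmin)), int(math.floor(xmax))
#        weights = [1.0] * (ixmax - ixmin + 1)
#        weights[0] = 1.0 - (xmin - ixmin)   # partial first column (rxmin)
#        weights[-1] = xmax - ixmax          # partial last column (rxmax)
#        return ixmin, weights               # sum(weights) == 2*boxwidth
#
#    box_weights(100.3)  # -> (97, [0.2, 1.0, 1.0, 1.0, 1.0, 0.8])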
- ys=tset.y_vs_wave(np.arange(tset.nspec),waves) #- ytraces - - camera=image.camera - spectrograph=int(camera[1:]) #- first char is "r", "b", or "z" - imshape=image.pix.shape - mask=np.zeros((imshape[1],imshape[0])) - maxx,maxy=mask.shape - maxx=maxx-1 - maxy=maxy-1 - ranges=np.zeros((mask.shape[1],xs.shape[0]+1),dtype=int) - for bin in range(0,len(waves)): - ixmaxold=0 - for spec in range(0,xs.shape[0]): - xpos=xs[spec][bin] - ypos=int(ys[spec][bin]) - if xpos<0 or xpos>maxx or ypos<0 or ypos>maxy : - continue - xmin=xpos-boxwidth - xmax=xpos+boxwidth - ixmin=int(math.floor(xmin)) - ixmax=int(math.floor(xmax)) - if ixmin <= ixmaxold: - print("Error: box width overlaps,",xpos,ypos,ixmin,ixmaxold) - return None,None - ixmaxold=ixmax - if mask[int(xpos)][ypos]>0 : - continue - # boxing in x vals - if ixmin < 0: #int value is less than 0 - ixmin=0 - rxmin=1.0 - else:# take part of the bin depending on real xmin - rxmin=1.0-xmin+ixmin - if ixmax>maxx:# xmax is bigger than the image - ixmax=maxx - rxmax=1.0 - else: # take the part of the bin depending on real xmax - rxmax=xmax-ixmax - ranges[ypos][spec+1]=math.ceil(xmax)#end at next column - if ranges[ypos][spec]==0: - ranges[ypos][spec]=ixmin - mask[ixmin][ypos]=rxmin - for x in range(ixmin+1,ixmax): mask[x][ypos]=1.0 - mask[ixmax][ypos]=rxmax - for ypos in range(ranges.shape[0]): - lastval=ranges[ypos][0] - for sp in range(1,ranges.shape[1]): - if ranges[ypos][sp]==0: - ranges[ypos][sp]=lastval - lastval=ranges[ypos][sp] - return mask,ranges - - if maskFile is not None: - import os - if os.path.exists(maskFile) and os.path.isfile(maskFile): - f=open(maskFile,'rb') - npf=np.load(f) - mask=npf['mask'] - ranges=npf['ranges'] - print("Loading mask from file %s"%maskFile) - - else: - print("Mask file is given but doesn't exist. Generating mask and saving to file %s"%maskFile) - mask,ranges=calcMask(tset) - try: - f=open(maskFile,'wb') - np.savez(f,mask=mask,ranges=ranges) - except: - pass - else: - mask,ranges=calcMask(tset) - Tmask=mask.T - maskedimg=(image.pix*Tmask) - maskedvar=(Tmask/image.ivar.clip(1e-8)) - - flux=np.zeros((maskedimg.shape[0],ranges.shape[1]-1)) - ivar=np.zeros((maskedimg.shape[0],ranges.shape[1]-1)) - - for r in range(flux.shape[0]): - row=np.add.reduceat(maskedimg[r],ranges[r])[:-1] - flux[r]=row - vrow=np.add.reduceat(maskedvar[r],ranges[r])[:-1] - ivar[r]=1/vrow - - wtarget=outwave - #- limit nspec to tset.nspec max - if nspec > tset.nspec: - nspec=tset.nspec - print("Warning! Extracting only {} spectra".format(tset.nspec)) - - fflux=np.zeros((nspec,len(wtarget))) - iivar=np.zeros((nspec,len(wtarget))) - - #- convert to per angstrom first and then resample to desired wavelength grid. 
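#- [Editor's aside, not part of the original file] Sketch of the error
#- propagation in the loop below: dividing the flux by the local bin width
#- dwave divides its variance by dwave**2, so the inverse variance must be
#- multiplied by dwave**2. The wavelength grid here is made up.
#
#    import numpy as np
#    ww = np.linspace(5630., 7740., 4128)  # hypothetical per-row wavelengths
#    counts = np.ones(ww.size)             # flux in counts per pixel
#    ivar = np.full(ww.size, 4.0)          # inverse variance of those counts
#    dwave = np.gradient(ww)               # Angstroms per pixel
#    flux_per_A = counts / dwave           # counts per Angstrom
#    ivar_per_A = ivar * dwave**2          # var(f/dw) = var(f)/dw**2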
- - for spec in range(nspec): - ww=tset.wave_vs_y(spec,np.arange(0,tset.npix_y)) - dwave=np.gradient(ww) - flux[:,spec]/=dwave - ivar[:,spec]*=dwave**2 - fflux[spec,:],iivar[spec,:]=resample_spec(ww,flux[:,spec],wtarget,ivar[:,spec]) - - #- Get resolution from the psf - resolution=get_resolution(wtarget,nspec,tset,usesigma=usesigma) - - return fflux,iivar,resolution diff --git a/deprecated/py/desispec/quicklook/qlconfig.py b/deprecated/py/desispec/quicklook/qlconfig.py deleted file mode 100644 index cfc9f49b3..000000000 --- a/deprecated/py/desispec/quicklook/qlconfig.py +++ /dev/null @@ -1,519 +0,0 @@ -""" -desispec.quicklook.qlconfig -=========================== - -""" -import numpy as np -import json -import yaml -import astropy.io.fits as pyfits -from desiutil.log import get_logger -from desispec.io import findfile -from desispec.calibfinder import CalibFinder -import os,sys -from desispec.quicklook import qlexceptions,qllogger - -class Config(object): - """ - A class to generate Quicklook configurations for a given DESI exposure. - expand_config will expand out to full format as needed by quicklook.setup - """ - def __init__(self, configfile, night, camera, expid, singqa, amps=True,rawdata_dir=None,specprod_dir=None, outdir=None,qlf=False,psfid=None,flatid=None,templateid=None,templatenight=None,qlplots=False,store_res=None): - """ - configfile: a configuration file for QL, e.g. desispec/data/quicklook/qlconfig_dark.yaml - night: night for the data to process, e.g. '20191015' - camera: which camera to process, e.g. 'r0' - expid: exposure id for the image to be processed - amps: for outputting amps level QA - Note: - rawdata_dir and specprod_dir: if not None, override the standard DESI convention - """ - with open(configfile, 'r') as f: - self.conf = yaml.safe_load(f) - f.close() - self.night = night - self.expid = expid - self.psfid = psfid - self.flatid = flatid - self.templateid = templateid - self.templatenight = templatenight - self.camera = camera - self.singqa = singqa - self.amps = amps - self.rawdata_dir = rawdata_dir - self.specprod_dir = specprod_dir - self.outdir = outdir - self.flavor = self.conf["Flavor"] - - #- Options to write out frame, fframe, preproc, and sky model files - self.dumpintermediates = False - self.writepreprocfile = self.conf["WritePreprocfile"] - self.writeskymodelfile = False - - self.plotconf = None - self.hardplots = False - #- Load plotting configuration file - if qlplots != 'noplots' and qlplots is not None: - with open(qlplots, 'r') as pf: - self.plotconf = yaml.safe_load(pf) - pf.close() - #- Use hard coded plotting algorithms - elif qlplots is None: - self.hardplots = True - - # Use --resolution to store full resolution information - if store_res: - self.usesigma = True - else: - self.usesigma = False - - self.pipeline = self.conf["Pipeline"] - self.algorithms = self.conf["Algorithms"] - self._palist = Palist(self.pipeline,self.algorithms) - self.pamodule = self._palist.pamodule - self.qamodule = self._palist.qamodule - - algokeys = self.algorithms.keys() - - # Extract mapping of scalar/reference key names for each QA - qaRefKeys = {} - for i in algokeys: - for k in self.algorithms[i]["QA"].keys(): - if k == "Check_HDUs": - qaRefKeys[k] = "CHECKHDUS" - qaparams=self.algorithms[i]["QA"][k]["PARAMS"] - for par in qaparams.keys(): - if "NORMAL_RANGE" in par: - scalar = par.replace("_NORMAL_RANGE","") - qaRefKeys[k] = scalar - - # Special additional parameters to read in (continued below the editor's aside). 
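#- [Editor's aside, not part of the original file] Toy illustration of the
#- qaRefKeys mapping built above: each *_NORMAL_RANGE parameter name in a
#- QA's PARAMS yields the scalar metric key used later for reference
#- checking. The config fragment below is hypothetical.
#
#    algorithms = {"Preproc": {"QA": {"Get_RMS": {"PARAMS":
#                      {"NOISE_AMP_NORMAL_RANGE": [-1.0, 1.0]}}}}}
#    qaRefKeys = {}
#    for pa in algorithms:
#        for qa, qaconf in algorithms[pa]["QA"].items():
#            for par in qaconf["PARAMS"]:
#                if "NORMAL_RANGE" in par:
#                    qaRefKeys[qa] = par.replace("_NORMAL_RANGE", "")
#    # qaRefKeys == {"Get_RMS": "NOISE_AMP"}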
- self.wavelength = None - for key in ["BoxcarExtract","Extract_QP"] : - if key in self.algorithms.keys(): - if "wavelength" in self.algorithms[key].keys(): - self.wavelength = self.algorithms[key]["wavelength"][self.camera[0]] - - self._qlf=qlf - qlog=qllogger.QLLogger(name="QLConfig") - self.log=qlog.getlog() - self._qaRefKeys = qaRefKeys - - @property - def palist(self): - """ palist for this config - see :class: `Palist` for details. - """ - return self._palist.palist - - @property - def qalist(self): - """ qalist for the given palist - """ - return self._palist.qalist - - @property - def paargs(self,psfspfile=None): - """ - Many arguments for the PAs are taken default. Some of these may need to be variable - psfspfile is for offline extraction case - """ - wavelength=self.wavelength - if self.wavelength is None: - #- setting default wavelength for extraction for different cam - if self.camera[0] == 'b': - self.wavelength='3570,5730,0.8' - elif self.camera[0] == 'r': - self.wavelength='5630,7740,0.8' - elif self.camera[0] == 'z': - self.wavelength='7420,9830,0.8' - - #- Make kwargs less verbose using '%%' marker for global variables. Pipeline will map them back - - peaks=None - if 'Initialize' in self.algorithms.keys(): - if 'PEAKS' in self.algorithms['Initialize'].keys(): - peaks=self.algorithms['Initialize']['PEAKS'] - if self.flavor == 'bias' or self.flavor == 'dark': - paopt_initialize={'Flavor':self.flavor,'Camera':self.camera} - else: - paopt_initialize={'Flavor':self.flavor,'FiberMap':self.fibermap,'Camera':self.camera,'Peaks':peaks} - - if self.writepreprocfile: - preprocfile=self.dump_pa("Preproc") - else: - preprocfile = None - paopt_preproc={'camera': self.camera,'dumpfile': preprocfile} - - if self.dumpintermediates: - framefile=self.dump_pa("BoxcarExtract") - fframefile=self.dump_pa("ApplyFiberFlat_QL") - qlsframefile=self.dump_pa("SkySub_QL") - qframefile=self.dump_pa("Extract_QP") - fframefile=self.dump_pa("ApplyFiberFlat_QP") - sframefile=self.dump_pa("SkySub_QP") - - else: - qframefile=None - framefile=None - fframefile=None - qlsframefile=None - sframefile=None - - if self.flavor == 'arcs': - arcimg=findfile('preproc',night=self.night,expid=self.expid,camera=self.camera,specprod_dir=self.specprod_dir) - flatimg=self.fiberflat - psffile=findfile('psf',expid=self.expid,night=self.night,camera=self.camera,specprod_dir=self.specprod_dir) - else: - arcimg=None - flatimg=None - psffile=None - - preproc_file=findfile('preproc',self.night,self.expid,self.camera,specprod_dir=self.specprod_dir) - paopt_flexure={'preprocFile':preproc_file, 'inputPSFFile': self.calibpsf, 'outputPSFFile': self.psf_filename} - - paopt_extract={'Flavor': self.flavor, 'BoxWidth': 2.5, 'FiberMap': self.fibermap, 'Wavelength': self.wavelength, 'Nspec': 500, 'PSFFile': self.calibpsf,'usesigma': self.usesigma, 'dumpfile': framefile} - - paopt_extract_qp={'Flavor': self.flavor, 'FullWidth': 7, 'FiberMap': self.fibermap, 'Wavelength': self.wavelength, 'Nspec': 500, 'PSFFile': self.psf_filename,'usesigma': self.usesigma, 'dumpfile': qframefile} - - paopt_resfit={'PSFinputfile': self.psf_filename, 'PSFoutfile': psffile, 'usesigma': self.usesigma} - - paopt_comflat={'outputFile': self.fiberflat} - - paopt_apfflat={'FiberFlatFile': self.fiberflat, 'dumpfile': fframefile} - - cframefile=self.dump_pa("ApplyFluxCalibration") - paopt_fluxcal={'outputfile': cframefile} - - if self.writeskymodelfile: - outskyfile = findfile('sky',night=self.night,expid=self.expid, camera=self.camera, 
rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - else: - outskyfile=None - paopt_skysub={'Outskyfile': outskyfile, 'dumpfile': qlsframefile, 'Apply_resolution': self.usesigma} - paopt_skysub_qp={'dumpfile': sframefile, 'Apply_resolution': False} - - paopts={} - defList={ - 'Initialize':paopt_initialize, - 'Preproc':paopt_preproc, - 'Flexure':paopt_flexure, - 'BoxcarExtract':paopt_extract, - 'ResolutionFit':paopt_resfit, - 'Extract_QP':paopt_extract_qp, - 'ComputeFiberflat_QL':paopt_comflat, - 'ComputeFiberflat_QP':paopt_comflat, - 'ApplyFiberFlat_QL':paopt_apfflat, - 'ApplyFiberFlat_QP':paopt_apfflat, - 'SkySub_QL':paopt_skysub, - 'SkySub_QP':paopt_skysub_qp, - 'ApplyFluxCalibration':paopt_fluxcal - } - - def getPAConfigFromFile(PA,algs): - def mergeDicts(source,dest): - for k in source: - if k not in dest: - dest[k]=source[k] - userconfig={} - if PA in algs: - fc=algs[PA] - for k in fc: #do a deep copy leave QA config out - if k != "QA": - userconfig[k]=fc[k] - defconfig={} - if PA in defList: - defconfig=defList[PA] - mergeDicts(defconfig,userconfig) - return userconfig - - for PA in self.palist: - paopts[PA]=getPAConfigFromFile(PA,self.algorithms) - #- Ignore intermediate dumping and write explicitly the outputfile for - self.outputfile=self.dump_pa(self.palist[-1]) - - return paopts - - def dump_pa(self,paname): - """ - dump the PA outputs to respective files. This has to be updated for fframe and sframe files as QL anticipates for dumpintermediate case. - """ - pafilemap={'Preproc': 'preproc', 'Flexure': None, 'BoxcarExtract': 'frame','ResolutionFit': None, 'Extract_QP': 'qframe', 'ComputeFiberflat_QL': 'fiberflat', 'ComputeFiberflat_QP': 'fiberflat', 'ApplyFiberFlat_QL': 'fframe', 'ApplyFiberFlat_QP': 'fframe', 'SkySub_QL': 'sframe', 'SkySub_QP': 'sframe', 'ApplyFluxCalibration': 'cframe'} - - if paname in pafilemap: - filetype=pafilemap[paname] - else: - raise IOError("PA name does not match any file type. 
Check PA name in config") - - pafile=None - if filetype is not None: - pafile=findfile(filetype,night=self.night,expid=self.expid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - - return pafile - - def dump_qa(self): - """ - yaml outputfile for the set of qas for a given pa - Name and default locations of files are handled by desispec.io.meta.findfile - """ - #- QA level outputs - #qa_outfile = {} - qa_outfig = {} - for PA in self.palist: - for QA in self.qalist[PA]: - #qa_outfile[QA] = self.io_qa(QA)[0] - qa_outfig[QA] = self.io_qa(QA)[1] - - #- make path if needed - path = os.path.normpath(os.path.dirname(qa_outfig[QA])) - if not os.path.exists(path): - os.makedirs(path) - - return (qa_outfig) -# return ((qa_outfile,qa_outfig),(qa_pa_outfile,qa_pa_outfig)) - - @property - def qaargs(self): - qaopts = {} - referencemetrics=[] - for PA in self.palist: - for qa in self.qalist[PA]: #- individual QA for that PA - pa_yaml = PA.upper() - params=self._qaparams(qa) - qaopts[qa]={'night' : self.night, 'expid' : self.expid, - 'camera': self.camera, 'paname': PA, 'PSFFile': self.psf_filename, - 'amps': self.amps, #'qafile': self.dump_qa()[0][qa], - 'qafig': self.dump_qa()[qa], 'FiberMap': self.fibermap, - 'param': params, 'refKey':self._qaRefKeys[qa], - 'singleqa' : self.singqa, - 'plotconf':self.plotconf, 'hardplots': self.hardplots - } - if qa == 'Calc_XWSigma': - qaopts[qa]['Peaks']=self.algorithms['Initialize']['PEAKS'] - qaopts[qa]['Flavor']=self.flavor - qaopts[qa]['PSFFile']=self.calibpsf - if qa == 'Sky_Peaks': - qaopts[qa]['Peaks']=self.algorithms['Initialize']['PEAKS'] - if self.singqa is not None: - qaopts[qa]['rawdir']=self.rawdata_dir - qaopts[qa]['specdir']=self.specprod_dir - if qa == 'Sky_Residual': - skyfile = findfile('sky',night=self.night,expid=self.expid, camera=self.camera, rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - qaopts[qa]['SkyFile']=skyfile - - if self.reference != None: - refkey=qaopts[qa]['refKey'] - for padict in range(len(self.reference)): - pa_metrics=self.reference[padict].keys() - if refkey in pa_metrics: - qaopts[qa]['ReferenceMetrics']={'{}'.format(refkey): self.reference[padict][refkey]} - return qaopts - - def _qaparams(self,qa): - params={} - if self.algorithms is not None: - for PA in self.palist: - if qa in self.qalist[PA]: - params[qa]=self.algorithms[PA]['QA'][qa]['PARAMS'] - else: - # RK: Need to settle optimal error handling in cases like this. 
- raise qlexceptions.ParameterException("Run time PARAMs not provided for QA") - - return params[qa] - - def io_qa_pa(self,paname): - """ - Specify the filenames: json and png of the pa level qa files" - """ - filemap={'Initialize': 'initial', - 'Preproc': 'preproc', - 'Flexure': 'flexure', - 'BoxcarExtract': 'boxextract', - 'Extract_QP': 'extractqp', - 'ComputeFiberflat_QL': 'computeflat', - 'ComputeFiberflat_QP': 'computeflatqp', - 'ApplyFiberFlat_QL': 'fiberflat', - 'ApplyFiberFlat_QP': 'fiberflatqp', - 'SkySub_QL': 'skysub', - 'SkySub_QP': 'skysubqp', - 'ResolutionFit': 'resfit', - 'ApplyFluxCalibration': 'fluxcalib' - } - - if paname in filemap: - outfile=findfile('ql_file',night=self.night,expid=self.expid, camera=self.camera, rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - outfile=outfile.replace('qlfile',filemap[paname]) - outfig=findfile('ql_fig',night=self.night,expid=self.expid, camera=self.camera, rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - outfig=outfig.replace('qlfig',filemap[paname]) - else: - raise IOError("PA name does not match any file type. Check PA name in config for {}".format(paname)) - - return (outfile,outfig) - - - def io_qa(self,qaname): - """ - Specify the filenames: json and png for the given qa output - """ - filemap={'Check_HDUs':'checkHDUs', - 'Trace_Shifts':'trace', - 'Bias_From_Overscan': 'getbias', - 'Get_RMS' : 'getrms', - 'Count_Pixels': 'countpix', - 'Calc_XWSigma': 'xwsigma', - 'CountSpectralBins': 'countbins', - 'Sky_Continuum': 'skycont', - 'Sky_Rband': 'skyRband', - 'Sky_Peaks': 'skypeak', - 'Sky_Residual': 'skyresid', - 'Integrate_Spec': 'integ', - 'Calculate_SNR': 'snr', - 'Check_Resolution': 'checkres', - 'Check_FiberFlat': 'checkfibflat' - } - - if qaname in filemap: - outfile=findfile('ql_file',night=self.night,expid=self.expid, camera=self.camera, rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - outfile=outfile.replace('qlfile',filemap[qaname]) - outfig=findfile('ql_fig',night=self.night,expid=self.expid, camera=self.camera, rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir,outdir=self.outdir) - outfig=outfig.replace('qlfig',filemap[qaname]) - else: - raise IOError("QA name does not match any file type. 
Check QA name in config for {}".format(qaname)) - - return (outfile,outfig) - - def expand_config(self): - """ - config: desispec.quicklook.qlconfig.Config object - """ - self.log.debug("Building Full Configuration") - self.debuglevel = self.conf["Debuglevel"] - self.period = self.conf["Period"] - self.timeout = self.conf["Timeout"] - - #- some global variables: - self.rawfile=findfile("raw",night=self.night,expid=self.expid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - - self.fibermap=None - if self.flavor != 'bias' and self.flavor != 'dark': - self.fibermap=findfile("fibermap", night=self.night,expid=self.expid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - - hdulist=pyfits.open(self.rawfile) - primary_header=hdulist[0].header - camera_header =hdulist[self.camera].header - - self.program=primary_header['PROGRAM'] - - hdulist.close() - - cfinder = CalibFinder([camera_header,primary_header]) - if self.flavor == 'dark' or self.flavor == 'bias' or self.flavor == 'zero': - self.calibpsf=None - else: - self.calibpsf=cfinder.findfile("PSF") - - if self.psfid is None: - self.psf_filename=findfile('psf',night=self.night,expid=self.expid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - else: - self.psf_filename=findfile('psf',night=self.night,expid=self.psfid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - - if self.flavor == 'dark' or self.flavor == 'bias' or self.flavor == 'zero': - self.fiberflat=None - elif self.flatid is None and self.flavor != 'flat': - self.fiberflat=cfinder.findfile("FIBERFLAT") - elif self.flavor == 'flat': - self.fiberflat=findfile('fiberflat',night=self.night,expid=self.expid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - else: - self.fiberflat=findfile('fiberflat',night=self.night,expid=self.flatid,camera=self.camera,rawdata_dir=self.rawdata_dir,specprod_dir=self.specprod_dir) - - #SE: QL no longer gets references from a template or merged json - #- Get reference metrics from template json file - self.reference=None - - outconfig={} - outconfig['Night'] = self.night - outconfig['Program'] = self.program - outconfig['Flavor'] = self.flavor - outconfig['Camera'] = self.camera - outconfig['Expid'] = self.expid - outconfig['DumpIntermediates'] = self.dumpintermediates - outconfig['FiberMap'] = self.fibermap - outconfig['Period'] = self.period - - pipeline = [] - for ii,PA in enumerate(self.palist): - pipe={} - pipe['PA'] = {'ClassName': PA, 'ModuleName': self.pamodule, 'kwargs': self.paargs[PA]} - pipe['QAs']=[] - for jj, QA in enumerate(self.qalist[PA]): - pipe_qa={'ClassName': QA, 'ModuleName': self.qamodule, 'kwargs': self.qaargs[QA]} - pipe['QAs'].append(pipe_qa) - pipe['StepName']=PA - pipeline.append(pipe) - - outconfig['PipeLine'] = pipeline - outconfig['RawImage'] = self.rawfile - outconfig['singleqa'] = self.singqa - outconfig['Timeout'] = self.timeout - outconfig['FiberFlatFile'] = self.fiberflat - outconfig['PlotConfig'] = self.plotconf - - #- Check if all the files exist for this QL configuration - check_config(outconfig,self.singqa) - return outconfig - -def check_config(outconfig,singqa): - """ - Given the expanded config, check that all required input files exist, etc. 
- """ - if singqa is None: - qlog=qllogger.QLLogger(name="QLConfig") - log=qlog.getlog() - log.info("Checking if all the necessary files exist.") - - if outconfig["Flavor"]=='science': - files = [outconfig["RawImage"], outconfig["FiberMap"], outconfig["FiberFlatFile"]] - for thisfile in files: - if not os.path.exists(thisfile): - sys.exit("File does not exist: {}".format(thisfile)) - else: - log.info("File check: Okay: {}".format(thisfile)) - elif outconfig["Flavor"]=="flat": - files = [outconfig["RawImage"], outconfig["FiberMap"]] - for thisfile in files: - if not os.path.exists(thisfile): - sys.exit("File does not exist: {}".format(thisfile)) - else: - log.info("File check: Okay: {}".format(thisfile)) - log.info("All necessary files exist for {} configuration.".format(outconfig["Flavor"])) - - return - -class Palist(object): - """ - Generate PA list and QA list for the Quicklook Pipeline for the given exposure - """ - def __init__(self,thislist=None,algorithms=None): - """ - thislist: given list of PAs - algorithms: Algorithm list coming from config file: e.g desispec/data/quicklook/qlconfig_dark.yaml - flavor: only needed if new list is to be built. - mode: online offline? - """ - self.thislist=thislist - self.algorithms=algorithms - self.palist=self._palist() - self.qalist=self._qalist() - - def _palist(self): - palist=self.thislist - self.pamodule='desispec.quicklook.procalgs' - return palist - - def _qalist(self): - qalist={} - for PA in self.thislist: - qalist[PA]=self.algorithms[PA]['QA'].keys() - self.qamodule='desispec.qa.qa_quicklook' - return qalist - - diff --git a/deprecated/py/desispec/quicklook/qlexceptions.py b/deprecated/py/desispec/quicklook/qlexceptions.py deleted file mode 100644 index 67f77dc09..000000000 --- a/deprecated/py/desispec/quicklook/qlexceptions.py +++ /dev/null @@ -1,12 +0,0 @@ -""" -desispec.quicklook.qlexceptions -=============================== - -Exception classes for Quicklook. 
-""" - -class ParameterException(Exception): - def __init__(self,value): - self.value=value - def __str__(self): - return "Parameter Exception: %s"%(repr(self.value)) diff --git a/deprecated/py/desispec/quicklook/qlheartbeat.py b/deprecated/py/desispec/quicklook/qlheartbeat.py deleted file mode 100644 index 19c5cd6b5..000000000 --- a/deprecated/py/desispec/quicklook/qlheartbeat.py +++ /dev/null @@ -1,67 +0,0 @@ -""" -desispec.quicklook.qlheartbeat -============================== - -""" -from threading import Thread -import time - -class QLHeartbeat: - def __init__(self,logger,beatinterval,timeout,precision=0.1,level=20): - self.__logger__=logger - self.__bint__=beatinterval - self.__timeout__=timeout - self.__message__="Heartbeat" - self.__thread__=None - self.__keep_running__=False - self.__precision__=precision - self.__running__=False - self.__level=level # set the message level for the heart beat - def __del__(self): - if self.__running__: - self.stop() - - def start(self,message,bint=None,timeout=None): - self.__message__=message - tnow=time.time() - self.__tstart__=tnow - if timeout is None: - ttimeout=self.__tstart__+self.__timeout__ - else: - ttimeout=self.__tstart__+timeout - self.__timeout__=timeout - if bint is None: - tstep=self.__tstart__+self.__bint__ - else: - tstep=self.__tstart__+bint - self.__bint__=bint - if self.__running__: - self.stop() - self.__logger__.log(self.__level,self.__message__) - self.__keep_running__=True - loop=lambda self: self.doloop() - self.__thread__=Thread(None,target=loop,args=[self]) - self.__thread__.daemon=True - self.__thread__.start() - self.__running__=True - - def doloop(self): - tnow=self.__tstart__ - ttimeout=self.__tstart__+self.__timeout__ - beattime=self.__tstart__+self.__bint__ - while(self.__keep_running__ and tnow3000 : #time change >+1hrs -beatinterval - self.__logger__.log(self.__level+10,"Clock skew detected") - tnow=tn - if tnow>=beattime: - beattime+=self.__bint__ - self.__logger__.log(self.__level,self.__message__) - def stop(self,msg=None): - self.__keep_running__=False - self.__thread__.join() - self.__running__=False - if msg is not None: - self.__logger__.log(self.__level,msg) diff --git a/deprecated/py/desispec/quicklook/qllogger.py b/deprecated/py/desispec/quicklook/qllogger.py deleted file mode 100644 index 07c77a13e..000000000 --- a/deprecated/py/desispec/quicklook/qllogger.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -desispec.quicklook.qllogger -=========================== - -""" -import logging -#from datetime import datetime - -class QLLogger: - """ Simple logger class using logging """ - __loglvl__=None - __loggername__="QuickLook" - def __init__(self,name=None,loglevel=logging.INFO): - if name is not None: - self.__loggername__=name - if QLLogger.__loglvl__ is None: #set singleton - QLLogger.__loglvl__=loglevel - self.__loglvl__=QLLogger.__loglvl__ - format = '%(asctime)-15s %(name)s %(levelname)s : %(message)s' - logging.basicConfig(format=format,level=self.__loglvl__) - def getlog(self,name=None): - if name is None: - loggername=self.__loggername__ - else: - loggername=name - return logging.getLogger(loggername) - diff --git a/deprecated/py/desispec/quicklook/qlpsf.py b/deprecated/py/desispec/quicklook/qlpsf.py deleted file mode 100644 index dd0f7d456..000000000 --- a/deprecated/py/desispec/quicklook/qlpsf.py +++ /dev/null @@ -1,140 +0,0 @@ -""" -desispec.quicklook.qlpsf -======================== - -Given a psf output file e.g. 
diff --git a/deprecated/py/desispec/quicklook/qlpsf.py b/deprecated/py/desispec/quicklook/qlpsf.py
deleted file mode 100644
index dd0f7d456..000000000
--- a/deprecated/py/desispec/quicklook/qlpsf.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
-desispec.quicklook.qlpsf
-========================
-
-Given a psf output file, e.g. output from bootcalib.write_psf or desimodel/data/specpsf/PSF files,
-this defines an interface that other codes can use to access the trace and wavelength solutions.
-
-Mostly making parallel to specter.psf.PSF baseclass and inheriting as needed, but only xtrace,
-ytrace and wavelength solution available for this case. No resolution information yet.
-"""
-
-import numbers
-import numpy as np
-from desiutil import funcfits as dufits
-from numpy.polynomial.legendre import Legendre,legval,legfit
-import astropy.io.fits as fits
-import scipy.optimize
-
-from desispec.io.xytraceset import read_xytraceset
-
-class PSF(object):
-    """
-    Base class for 2D psf
-    """
-    def __init__(self,filename):
-
-
-        print("desispec.psf is DEPRECATED, PLEASE USE desispec.xytraceset")
-
-        self.traceset = read_xytraceset(filename)
-
-        # all in traceset now.
-        # psf kept to ease transition
-        self.npix_y=self.traceset.npix_y
-        self.xcoeff=self.traceset.x_vs_wave_traceset._coeff # in traceset
-        self.ycoeff=self.traceset.y_vs_wave_traceset._coeff # in traceset
-        self.wmin=self.traceset.wavemin # in traceset
-        self.wmax=self.traceset.wavemax # in traceset
-        self.nspec=self.traceset.nspec # in traceset
-        self.ncoeff=self.traceset.x_vs_wave_traceset._coeff.shape[1] #
-        self.traceset.wave_vs_y(0,100.) # call wave_vs_y for creation of wave_vs_y_traceset and consistent inversion
-        self.icoeff=self.traceset.wave_vs_y_traceset._coeff # in traceset
-        self.ymin=self.traceset.wave_vs_y_traceset._xmin # in traceset
-        self.ymax=self.traceset.wave_vs_y_traceset._xmax # in traceset
-
-
-    def x(self,ispec=None,wavelength=None):
-        """
-        returns CCD x centroids for the spectra
-        ispec can be None, scalar or a vector
-        wavelength can be None or a vector
-        """
-        if ispec is None :
-            ispec = np.arange(self.traceset.nspec)
-        else :
-            ispec = np.atleast_1d(ispec)
-
-        if wavelength is None :
-            wavelength = self.wavelength(ispec)
-        else :
-            wavelength = np.atleast_1d(wavelength)
-
-        if len(wavelength.shape)==2 :
-            res=np.zeros(wavelength.shape)
-            for j,i in enumerate(ispec):
-                res[j]=self.traceset.x_vs_wave(i,wavelength[i])
-        else :
-            ### print("ispec.size=",ispec.size,"wavelength.size=",wavelength.size)
-            res=np.zeros((ispec.size,wavelength.size))
-            for j,i in enumerate(ispec):
-                res[j]=self.traceset.x_vs_wave(i,wavelength)
-        return res
-
-    def y(self,ispec=None,wavelength=None):
-        """
-        returns CCD y centroids for the spectra
-        ispec can be None, scalar or a vector
-        wavelength can be a vector but not allowing None #- similar as in specter.psf.PSF.y
-        """
-        if ispec is None :
-            ispec = np.arange(self.traceset.nspec)
-        else :
-            ispec = np.atleast_1d(ispec)
-
-        if wavelength is None :
-            wavelength = self.wavelength(ispec)
-        else :
-            wavelength = np.atleast_1d(wavelength)
-
-        if len(wavelength.shape)==2 :
-            res=np.zeros(wavelength.shape)
-            for j,i in enumerate(ispec):
-                res[j]=self.traceset.y_vs_wave(i,wavelength[i])
-        else :
-            res=np.zeros((ispec.size,wavelength.size))
-            for j,i in enumerate(ispec):
-                res[j]=self.traceset.y_vs_wave(i,wavelength)
-        return res
-
-
-    def wavelength(self,ispec=None,y=None):
-        """
-        returns wavelength evaluated at y
-        """
-        if y is None:
-            y=np.arange(0,self.npix_y)
-        else :
-            y = np.atleast_1d(y)
-
-        if ispec is None:
-            ispec = np.arange(self.traceset.nspec)
-
-        if np.size(ispec)==1 :
-            return self.traceset.wave_vs_y(ispec,y)
-        else :
-            if np.size(y)==1 :
-                res=np.zeros((ispec.size))
-                for j,i in enumerate(ispec):
-                    res[j]=self.traceset.wave_vs_y(i,y)
-                return res
-            else :
-                res=np.zeros((ispec.size,y.size))
-                for j,i in
enumerate(ispec): - res[j]=self.traceset.wave_vs_y(i,y) - return res - - def xsigma(self,ispec,wave): - return self.traceset.xsig_vs_wave(ispec,wave) - - def ysigma(self,ispec,wave): - return self.traceset.ysig_vs_wave(ispec,wave) - - def angstroms_per_pixel(self, ispec, wavelength): - """ - Return CCD pixel width in Angstroms for spectrum ispec at given - wavlength(s). Wavelength may be scalar or array. - """ - ww = self.wavelength(ispec, y=np.arange(self.npix_y)) - dw = np.gradient( ww ) - return np.interp(wavelength, ww, dw) diff --git a/deprecated/py/desispec/quicklook/qlresolution.py b/deprecated/py/desispec/quicklook/qlresolution.py deleted file mode 100644 index 1816d10fd..000000000 --- a/deprecated/py/desispec/quicklook/qlresolution.py +++ /dev/null @@ -1,63 +0,0 @@ -""" -desispec.quicklook.qlresolution -=============================== - -Quicklook version of resolution object that can -calculate resolution efficiently from psf information - -Author: Sami Kama - -""" - -import numpy as np -import scipy.sparse -import scipy.special - -class QuickResolution(scipy.sparse.dia_matrix): - """ - Quicklook version of the resolution mimicking desispec.resolution.Resolution - with some reduction in dimentionality. Contains code from Resolution implementation - Note that this is similar to desispec.resolution.Resolution, though faster and differing - in implementation details that should be cross checked before merging these - or replacing one with the other - """ - def __init__(self,mu=None,sigma=None,wdict=None,waves=None,ndiag=9): - self.__ndiag=ndiag - if ndiag & 0x1 == 0: - raise ValueError("Need odd numbered diagonals, got %d"%ndiag) - def _binIntegral(x,mu=None,sigma=None): - """ - x: bin boundaries vector (self.__ndiag,) - mu: means vector of shape[nwave,1] - sigma: sigmas of shape[nwave,1] - """ - nvecs=1 - if sigma is not None: - nvecs=sigma.shape[0] - if mu is None: - mu=np.zeros((nvecs,1)) - if sigma is None: - sigma=np.ones(mu.shape)*0.5 - sx=(np.tile(x,(mu.shape[0],1))-mu)/(sigma*np.sqrt(2)) - return 0.5*(np.abs(np.diff(scipy.special.erf(sx)))) - - mnone=mu is None - snone=sigma is None - dnone=wdict is None - wnone=waves is None - if snone: - if wnone or dnone: - raise ValueError('Cannot initialize Resolution data need sigma or wdict and waves') - else: - from desiutil import funcfits as dufits - sigma=dufits.func_val(waves,wdict) - nwave = len(sigma) - s=sigma.reshape((nwave,1)) - bins=np.arange(ndiag,0,-1) - bins=bins-(bins[0]+bins[-1])/2.0 - x=np.concatenate([bins+0.5,bins[-1:]-0.5]) - self.offsets=bins - rdata=_binIntegral(x,mu=mu,sigma=s).T - - scipy.sparse.dia_matrix.__init__(self,(rdata,self.offsets),(nwave,nwave)) - diff --git a/deprecated/py/desispec/quicklook/quickfiberflat.py b/deprecated/py/desispec/quicklook/quickfiberflat.py deleted file mode 100644 index 26fcb65ce..000000000 --- a/deprecated/py/desispec/quicklook/quickfiberflat.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -desispec.quicklook.quickfiberflat -================================= - -Here will be the fiberflat routines specific to quicklook. - -G. Dhungana, 2016 -""" - -import numpy as np -from desiutil.log import get_logger - -def compute_fiberflat(): - """ - computes fiberflat: A boss like algorithm writing in progress and will fit in here. - - Args: - """ - -def apply_fiberflat(frame,fiberflat): - """ - Args: frame: desispec.frame.Frame object - fiberflat: desispec.fiberflat.Fiberflat object - """ - from desispec import frame as fr - - # SK. 
This will not work since the frame object generated here - # does not have all the parameters used in construction of the - # input frame. Unfortunately it is not possible to extract all the - # information from the input either. Possibly correct action would - # be the directly modify the input frame object - - #- update ivar (like in offline case) - - log = get_logger() - - if frame.flux.shape[0] != fiberflat.fiberflat.shape[0] : - mess="not same number of fibers: frame.flux.shape[0]={} != fiberflat.fiberflat.shape[0]={}".format(frame.flux.shape[0],fiberflat.fiberflat.shape[0]) - log.error(mess) - raise RuntimeError(mess) - - if frame.wave.size != fiberflat.wave.size or np.max(np.abs(frame.wave-fiberflat.wave))>0.01 : - log.warning("interpolating fiber flat") - flat=np.ones(frame.flux.shape) - flativar=np.zeros(frame.flux.shape) - for i in range(frame.flux.shape[0]) : - flat[i]=np.interp(frame.wave,fiberflat.wave[fiberflat.ivar[i]>0],fiberflat.fiberflat[i,fiberflat.ivar[i]>0]) - flativar[i]=np.interp(frame.wave,fiberflat.wave,fiberflat.ivar[i]) - else : - flat = fiberflat.fiberflat - flativar= fiberflat.ivar - - frame.ivar=(frame.ivar>0)*(flativar>0)*(flat>0)/( 1./((frame.ivar+(frame.ivar==0))*(flat**2+(flat==0))) + frame.flux**2/(flativar*flat**4+(flativar*flat==0)) ) - - #- flattened flux - ok=np.where(flat > 0) - fflux=frame.flux - fflux[ok]=frame.flux[ok]/flat[ok] - - #- return a frame object - - #fframe=fr.Frame(frame.wave,fflux,fivar,frame.mask,frame.resolution_data,meta=frame.meta,fibermap=frame.fibermap) - - return frame - diff --git a/deprecated/py/desispec/quicklook/quicklook.py b/deprecated/py/desispec/quicklook/quicklook.py deleted file mode 100644 index 46a025ce2..000000000 --- a/deprecated/py/desispec/quicklook/quicklook.py +++ /dev/null @@ -1,378 +0,0 @@ -""" -desispec.quicklook.quicklook -============================ - -""" -from __future__ import absolute_import, division, print_function - -import sys,os,time,signal -import threading,string -import subprocess -import importlib -import yaml -import astropy.io.fits as fits -import desispec.io.fibermap as fibIO -import desispec.io.sky as skyIO -import desispec.io.fiberflat as ffIO -import desispec.fiberflat as ff -import desispec.io.image as imIO -import desispec.image as im -import desispec.io.frame as frIO -import desispec.frame as dframe -from desispec.quicklook import qllogger -from desispec.quicklook import qlheartbeat as QLHB -from desispec.io import qa as qawriter -from desispec.quicklook.merger import QL_QAMerger -from desispec.quicklook import procalgs -from desiutil.io import yamlify - -def get_chan_spec_exp(inpname,camera=None): - """ - Get channel, spectrograph and expid from the filename itself - - Args: - inpname: can be raw or pix, or frame etc filename - camera: is required for raw case, eg, r0, b5, z8 - irrelevant for others - """ - basename=os.path.basename(inpname) - if basename == "": - print("can't parse input file name") - sys.exit("can't parse input file name {}".format(inpname)) - brk=string.split(inpname,'-') - if len(brk)!=3: #- for raw files - if camera is None: - raise IOError("Must give camera for raw file") - else: - expid=int(string.replace(brk[1],".fits.fz","")) - - elif len(brk)==3: #- for pix,frame etc. 
files - camera=brk[1] - expid=int(string.replace(brk[2],".fits","")) - chan=camera[0] - spectrograph=int(camera[1:]) - return (chan,spectrograph,expid) - -def getobject(conf,log): - #qlog=qllogger("QuickLook",20) - #log=qlog.getlog() - log.debug("Running for {} {} {}".format(conf["ModuleName"],conf["ClassName"],conf)) - try: - mod=__import__(conf["ModuleName"],fromlist=[conf["ClassName"]]) - klass=getattr(mod,conf["ClassName"]) - if "Name" in conf.keys(): - return klass(conf["Name"],conf) - else: - return klass(conf["ClassName"],conf) - except Exception as e: - log.error("Failed to import {} from {}. Error was '{}'".format(conf["ClassName"],conf["ModuleName"],e)) - return None - -def mapkeywords(kw,kwmap): - """ - Maps the keyword in the configuration to the corresponding object - returned by the desispec.io module. - e.g Bias Image file is mapped to biasimage object... for the same keyword "BiasImage" - """ - - newmap={} - # qlog=qllogger.QLLogger() - # log=qlog.getlog() - for k,v in kw.items(): - if isinstance(v,str) and len(v)>=3 and v[0:2]=="%%": #- For direct configuration - if v[2:] in kwmap: - newmap[k]=kwmap[v[2:]] - else: - log.warning("Can't find key {} in conversion map. Skipping".format(v[2:])) - if k in kwmap: #- for configs generated via desispec.quicklook.qlconfig - newmap[k]=kwmap[k] - else: - newmap[k]=v - return newmap - -def runpipeline(pl,convdict,conf): - """ - Runs the quicklook pipeline as configured - - Args: - pl: is a list of [pa,qas] where pa is a pipeline step and qas the corresponding - qas for that pa - convdict: converted dictionary e.g : conf["IMAGE"] is the real psf file - but convdict["IMAGE"] is like desispec.image.Image object and so on. - details in setup_pipeline method below for examples. - conf: a configured dictionary, read from the configuration yaml file. 
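getobject above instantiates pipeline classes by name at runtime. A minimal sketch of that dynamic-import pattern (importlib.import_module is the modern equivalent of the __import__(..., fromlist=[...]) call used here; the function name is mine):

    import importlib

    def load_class(module_name, class_name):
        # Import the module, then fetch the class attribute by name,
        # as getobject does before instantiating the PA or QA object.
        mod = importlib.import_module(module_name)
        return getattr(mod, class_name)

    # e.g. klass = load_class("desispec.quicklook.procalgs", "Initialize")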
- e.g: conf=configdict=yaml.safe_load(open('configfile.yaml','rb')) - """ - - qlog=qllogger.QLLogger() - log=qlog.getlog() - hb=QLHB.QLHeartbeat(log,conf["Period"],conf["Timeout"]) - - inp=convdict["rawimage"] - singqa=conf["singleqa"] - paconf=conf["PipeLine"] - qlog=qllogger.QLLogger() - log=qlog.getlog() - passqadict=None #- pass this dict to QAs downstream - schemaMerger=QL_QAMerger(conf['Night'],conf['Expid'],conf['Flavor'],conf['Camera'],conf['Program'],convdict) - QAresults=[] - if singqa is None: - for s,step in enumerate(pl): - log.info("Starting to run step {}".format(paconf[s]["StepName"])) - pa=step[0] - pargs=mapkeywords(step[0].config["kwargs"],convdict) - schemaStep=schemaMerger.addPipelineStep(paconf[s]["StepName"]) - try: - hb.start("Running {}".format(step[0].name)) - oldinp=inp #- copy for QAs that need to see earlier input - inp=pa(inp,**pargs) - if step[0].name == 'Initialize': - schemaStep.addMetrics(inp[1]) - except Exception as e: - log.critical("Failed to run PA {} error was {}".format(step[0].name,e),exc_info=True) - sys.exit("Failed to run PA {}".format(step[0].name)) - qaresult={} - for qa in step[1]: - try: - qargs=mapkeywords(qa.config["kwargs"],convdict) - hb.start("Running {}".format(qa.name)) - qargs["dict_countbins"]=passqadict #- pass this to all QA downstream - - if qa.name=="RESIDUAL" or qa.name=="Sky_Residual": - res=qa(inp[0],inp[1],**qargs) - else: - if isinstance(inp,tuple): - res=qa(inp[0],**qargs) - else: - res=qa(inp,**qargs) - - if qa.name=="COUNTBINS" or qa.name=="CountSpectralBins": - passqadict=res - if "qafile" in qargs: - qawriter.write_qa_ql(qargs["qafile"],res) - log.debug("{} {}".format(qa.name,inp)) - qaresult[qa.name]=res - schemaStep.addParams(res['PARAMS']) - schemaStep.addMetrics(res['METRICS']) - except Exception as e: - log.warning("Failed to run QA {}. Got Exception {}".format(qa.name,e),exc_info=True) - hb.stop("Step {} finished.".format(paconf[s]["StepName"])) - QAresults.append([pa.name,qaresult]) - hb.stop("Pipeline processing finished. Serializing result") - else: - import numpy as np - qa=None - qas=[[],['Bias_From_Overscan','Get_RMS','Count_Pixels','Calc_XWSigma'],'Trace_Shifts','CountSpectralBins',['Sky_Continuum','Sky_Peaks'],['Calculate_SNR'],['Sky_Rband','Integrate_Spec']] - - singleqaperpa=['Bias_From_Overscan','Check_HDUs','Trace_Shifts','CountSpectralBins'] - for palg in range(len(qas)): - if singqa in qas[palg]: - pa=pl[palg][0] - pac=paconf[palg] - if singqa in singleqaperpa: - qa = pl[palg][1][0] - else: - for qalg in range(len(qas[palg])): - if qas[palg][qalg] == singqa: - qa=pl[palg][1][qalg] - if qa is None: - log.critical("Unknown input QA... Valid QAs are: {}".format(qas)) - sys.exit() - - log.info("Starting to run step {}".format(pac["StepName"])) - pargs=mapkeywords(pa.config["kwargs"],convdict) - schemaStep=schemaMerger.addPipelineStep(pac["StepName"]) - qaresult={} - try: - qargs=mapkeywords(qa.config["kwargs"],convdict) - hb.start("Running {}".format(qa.name)) - if singqa=="Sky_Residual": - res=qa(inp[0],inp[1],**qargs) - else: - if isinstance(inp,tuple): - res=qa(inp[0],**qargs) - else: - res=qa(inp,**qargs) - if singqa=="CountSpectralBins": - passqadict=res - if "qafile" in qargs: - qawriter.write_qa_ql(qargs["qafile"],res) - log.debug("{} {}".format(qa.name,inp)) - schemaStep.addMetrics(res['METRICS']) - except Exception as e: - log.warning("Failed to run QA {}. 
Got Exception {}".format(qa.name,e),exc_info=True) - if len(qaresult): - if conf["DumpIntermediates"]: - f = open(pac["OutputFile"],"w") - f.write(yaml.dump(yamlify(qaresult))) - log.info("{} finished".format(qa.name)) - - #- merge QAs for this pipeline execution - #- RS: don't write merged file if running single QA - if singqa is None: - log.debug("Dumping mergedQAs") - from desispec.io import findfile - specprod_dir=os.environ['QL_SPEC_REDUX'] if 'QL_SPEC_REDUX' in os.environ else "" - destFile=findfile('ql_mergedQA_file',night=conf['Night'], - expid=conf['Expid'], - camera=conf['Camera'], - specprod_dir=specprod_dir) - - schemaMerger.writeTojsonFile(destFile) - log.info("Wrote merged QA file {}".format(destFile)) - if isinstance(inp,tuple): - return inp[0] - else: - return inp - -#- Setup pipeline from configuration - -def setup_pipeline(config): - """ - Given a configuration from QLF, this sets up a pipeline [pa,qa] and also returns a - conversion dictionary from the configuration dictionary so that Pipeline steps (PA) can - take them. This is required for runpipeline. - """ - qlog=qllogger.QLLogger() - log=qlog.getlog() - if config is None: - return None - log.debug("Reading Configuration") - flavor=config["Flavor"] - if "RawImage" not in config: - log.critical("Config is missing \"RawImage\" key.") - sys.exit("Missing \"RawImage\" key.") - inpname=config["RawImage"] - if flavor != 'bias' and flavor != 'dark': - if "FiberMap" not in config: - log.critical("Config is missing \"FiberMap\" key.") - sys.exit("Missing \"FiberMap\" key.") - fibname=config["FiberMap"] - proctype="Exposure" - if "Camera" in config: - camera=config["Camera"] - if "DataType" in config: - proctype=config["DataType"] - debuglevel=20 - if "DebugLevel" in config: - debuglevel=config["DebugLevel"] - log.setLevel(debuglevel) - hbeat=QLHB.QLHeartbeat(log,config["Period"],config["Timeout"]) - if config["Timeout"]> 200.0: - log.warning("Heartbeat timeout exceeding 200.0 seconds") - dumpintermediates=False - if "DumpIntermediates" in config: - dumpintermediates=config["DumpIntermediates"] - - biasimage=None #- This will be the converted dictionary key - biasfile=None - if "BiasImage" in config: - biasfile=config["BiasImage"] - - darkimage=None - darkfile=None - if "DarkImage" in config: - darkfile=config["DarkImage"] - - pixelflatfile=None - pixflatimage=None - if "PixelFlat" in config: - pixelflatfile=config["PixelFlat"] - - fiberflatimagefile=None - fiberflatimage=None - if "FiberFlatImage" in config: - fiberflatimagefile=config["FiberFlatImage"] - - arclampimagefile=None - arclampimage=None - if "ArcLampImage" in config: - arclampimagefile=config["ArcLampImage"] - - fiberflatfile=None - fiberflat=None - if config["Flavor"] == 'science': - if "FiberFlatFile" in config: - fiberflatfile=config["FiberFlatFile"] - - skyfile=None - skyimage=None - if "SkyFile" in config: - skyfile=config["SkyFile"] - - psf_filename=None - if "PSFFile" in config: - psf_filename=config["PSFFile"] - #import desispec.psf - #psf=desispec.psf.PSF(config["PSFFile"]) - - if "basePath" in config: - basePath=config["basePath"] - - hbeat.start("Reading input file {}".format(inpname)) - inp=fits.open(inpname) #- reading raw image directly from astropy.io.fits - hbeat.start("Reading fiberMap file {}".format(fibname)) - - convdict={} - - if flavor != 'bias' and flavor != 'dark': - fibfile=fibIO.read_fibermap(fibname) - fibhdr=fibfile.meta - convdict["FiberMap"]=fibfile - - if psf_filename is not None: - convdict["PSFFile"]=psf_filename - - if biasfile is 
not None: - hbeat.start("Reading Bias Image {}".format(biasfile)) - biasimage=imIO.read_image(biasfile) - convdict["BiasImage"]=biasimage - - if darkfile is not None: - hbeat.start("Reading Dark Image {}".format(darkfile)) - darkimage=imIO.read_image(darkfile) - convdict["DarkImage"]=darkimage - - if pixelflatfile: - hbeat.start("Reading PixelFlat Image {}".format(pixelflatfile)) - pixelflatimage=imIO.read_image(pixelflatfile) - convdict["PixelFlat"]=pixelflatimage - - if fiberflatfile: - hbeat.start("Reading FiberFlat {}".format(fiberflatfile)) - fiberflat=ffIO.read_fiberflat(fiberflatfile) - convdict["FiberFlatFile"]=fiberflat - - if skyfile: - hbeat.start("Reading SkyModel file {}".format(skyfile)) - skymodel=skyIO.read_sky(skyfile) - convdict["SkyFile"]=skymodel - - if dumpintermediates: - convdict["DumpIntermediates"]=dumpintermediates - - hbeat.stop("Finished reading all static files") - - img=inp - convdict["rawimage"]=img - pipeline=[] - for step in config["PipeLine"]: - pa=getobject(step["PA"],log) - if len(pipeline) == 0: - if not pa.is_compatible(type(img)): - log.critical("Pipeline configuration is incorrect! check configuration {} {}".format(img,pa.is_compatible(img))) - sys.exit("Wrong pipeline configuration") - else: - if not pa.is_compatible(pipeline[-1][0].get_output_type()): - log.critical("Pipeline configuration is incorrect! check configuration") - log.critical("Can't connect input of {} to output of {}. Incompatible types".format(pa.name,pipeline[-1][0].name)) - sys.exit("Wrong pipeline configuration") - qas=[] - for q in step["QAs"]: - qa=getobject(q,log) - if not qa.is_compatible(pa.get_output_type()): - log.warning("QA {} can not be used for output of {}. Skipping expecting {} got {} {}".format(qa.name,pa.name,qa.__inpType__,pa.get_output_type(),qa.is_compatible(pa.get_output_type()))) - else: - qas.append(qa) - pipeline.append([pa,qas]) - return pipeline,convdict diff --git a/deprecated/py/desispec/quicklook/quicksky.py b/deprecated/py/desispec/quicklook/quicksky.py deleted file mode 100644 index c8e166e72..000000000 --- a/deprecated/py/desispec/quicklook/quicksky.py +++ /dev/null @@ -1,211 +0,0 @@ -""" -desispec.quicklook.quicksky -=========================== - -Here will be the sky computing and sky subtraction routines for QL. -""" -import sys -import numpy as np -from desispec.sky import SkyModel -from desispec import util -from desispec import frame as fr -import scipy -from desispec.resolution import Resolution -from desispec.linalg import cholesky_solve - -def compute_sky(fframe,fibermap=None,nsig_clipping=4., apply_resolution=False): - """ - Adding in the offline algorithm here to be able to apply resolution for sky compute. - We will update this here as needed for quicklook. - The original weighted sky compute still is the default. - - Args: fframe: fiberflat fielded frame object - fibermap: fibermap object - apply_resolution: if True, uses the resolution in the frame object to evaluate - sky allowing fiber to fiber variation of resolution. - """ - nspec=fframe.nspec - nwave=fframe.nwave - - #- Check with fibermap. 
exit if None - #- use fibermap from frame itself if exists - - if fframe.fibermap is not None: - fibermap=fframe.fibermap - - if fibermap is None: - print("Must have fibermap for Sky compute") - sys.exit(0) - - #- get the sky - skyfibers = np.where(fibermap['OBJTYPE'] == 'SKY')[0] - skyfluxes=fframe.flux[skyfibers] - skyivars=fframe.ivar[skyfibers] - - - nfibers=len(skyfibers) - - if apply_resolution: - max_iterations=100 - current_ivar=skyivars.copy() - Rsky = fframe.R[skyfibers] - sqrtw=np.sqrt(skyivars) - sqrtwflux=sqrtw*skyfluxes - - chi2=np.zeros(skyfluxes.shape) - - nout_tot=0 - for iteration in range(max_iterations) : - - A=scipy.sparse.lil_matrix((nwave,nwave)).tocsr() - B=np.zeros((nwave)) - # diagonal sparse matrix with content = sqrt(ivar)*flat of a given fiber - SD=scipy.sparse.lil_matrix((nwave,nwave)) - # loop on fiber to handle resolution - for fiber in range(nfibers) : - if fiber%10==0 : - print("iter %d fiber %d"%(iteration,fiber)) - R = Rsky[fiber] - - # diagonal sparse matrix with content = sqrt(ivar) - SD.setdiag(sqrtw[fiber]) - - sqrtwR = SD*R # each row r of R is multiplied by sqrtw[r] - - A = A+(sqrtwR.T*sqrtwR).tocsr() - B += sqrtwR.T*sqrtwflux[fiber] - - print("iter %d solving"%iteration) - - w = A.diagonal()>0 - A_pos_def = A.todense()[w,:] - A_pos_def = A_pos_def[:,w] - skyflux = B*0 - try: - skyflux[w]=cholesky_solve(A_pos_def,B[w],rcond=None) - except: - print("cholesky failed, trying svd in iteration {}".format(iteration)) - skyflux[w]=np.linalg.lstsq(A_pos_def,B[w],rcond=None)[0] - - print("iter %d compute chi2"%iteration) - - for fiber in range(nfibers) : - - S = Rsky[fiber].dot(skyflux) - chi2[fiber]=current_ivar[fiber]*(skyfluxes[fiber]-S)**2 - - print("rejecting") - - nout_iter=0 - if iteration<1 : - # only remove worst outlier per wave - # apply rejection iteratively, only one entry per wave among fibers - # find waves with outlier (fastest way) - nout_per_wave=np.sum(chi2>nsig_clipping**2,axis=0) - selection=np.where(nout_per_wave>0)[0] - for i in selection : - worst_entry=np.argmax(chi2[:,i]) - current_ivar[worst_entry,i]=0 - sqrtw[worst_entry,i]=0 - sqrtwflux[worst_entry,i]=0 - nout_iter += 1 - else : - # remove all of them at once - bad=(chi2>nsig_clipping**2) - current_ivar *= (bad==0) - sqrtw *= (bad==0) - sqrtwflux *= (bad==0) - nout_iter += np.sum(bad) - - nout_tot += nout_iter - - sum_chi2=float(np.sum(chi2)) - ndf=int(np.sum(chi2>0)-nwave) - chi2pdf=0. 
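The iterative solve above builds the normal equations A x = B, attempts a Cholesky solve with desispec.linalg.cholesky_solve, and falls back to least squares when the matrix is not positive definite. The same pattern as a self-contained sketch, with scipy standing in for the desispec solver (an assumption for illustration, not the code being deleted):

    import numpy as np
    import scipy.linalg

    def solve_normal_equations(A, B):
        # Prefer the fast Cholesky path when A is positive definite;
        # fall back to lstsq if the factorization fails, as compute_sky does.
        try:
            return scipy.linalg.cho_solve(scipy.linalg.cho_factor(A), B)
        except np.linalg.LinAlgError:
            return np.linalg.lstsq(A, B, rcond=None)[0]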
- if ndf>0 : - chi2pdf=sum_chi2/ndf - print("iter #%d chi2=%f ndf=%d chi2pdf=%f nout=%d"%(iteration,sum_chi2,ndf,chi2pdf,nout_iter)) - - if nout_iter == 0 : - break - - print("nout tot=%d"%nout_tot) - # solve once again to get deconvolved sky variance - #skyflux,skycovar=cholesky_solve_and_invert(A.todense(),B) - skyflux = np.linalg.lstsq(A.todense(),B,rcond=None)[0] - skycovar = np.linalg.pinv(A.todense()) - #- sky inverse variance, but incomplete and not needed anyway - # skyvar=np.diagonal(skycovar) - # skyivar=(skyvar>0)/(skyvar+(skyvar==0)) - - # Use diagonal of skycovar convolved with mean resolution of all fibers - # first compute average resolution - #- computing mean from matrix itself - R= (fframe.R.sum()/fframe.nspec).todia() - #mean_res_data=np.mean(fframe.resolution_data,axis=0) - #R = Resolution(mean_res_data) - # compute convolved sky and ivar - cskycovar=R.dot(skycovar).dot(R.T.todense()) - cskyvar=np.diagonal(cskycovar) - cskyivar=(cskyvar>0)/(cskyvar+(cskyvar==0)) - - # convert cskyivar to 2D; today it is the same for all spectra, - # but that may not be the case in the future - finalskyivar = np.tile(cskyivar, nspec).reshape(nspec, nwave) - - # Convolved sky - finalskyflux = np.zeros(fframe.flux.shape) - for i in range(nspec): - finalskyflux[i] = fframe.R[i].dot(skyflux) - - # need to do better here - mask = (finalskyivar==0).astype(np.uint32) - - else: #- compute weighted average sky ignoring the fiber/wavelength resolution - if skyfibers.shape[0] > 1: - - weights=skyivars - #- now get weighted meansky and ivar - meanskyflux=np.average(skyfluxes,axis=0,weights=weights) - wtot=weights.sum(axis=0) - werr2=(weights**2*(skyfluxes-meanskyflux)**2).sum(axis=0) - werr=np.sqrt(werr2)/wtot - meanskyivar=1./werr**2 - else: - meanskyflux=skyfluxes - meanskyivar=skyivars - - #- Create a 2d- sky model replicating this - finalskyflux=np.tile(meanskyflux,nspec).reshape(nspec,nwave) - finalskyivar=np.tile(meanskyivar,nspec).reshape(nspec,nwave) - mask=fframe.mask - - skymodel=SkyModel(fframe.wave,finalskyflux,finalskyivar,mask) - return skymodel - - -def subtract_sky(fframe,skymodel): - """ - skymodel: skymodel object. - fframe: frame object to do the sky subtraction, should be already fiber flat fielded - need same number of fibers and same wavelength grid - """ - #- Check number of specs - assert fframe.nspec == skymodel.nspec - assert fframe.nwave == skymodel.nwave - - #- check same wavelength grid, die if not - if not np.allclose(fframe.wave, skymodel.wave): - message = "frame and sky not on same wavelength grid" - raise ValueError(message) - - #SK. This wouldn't work since not all properties of the input - #frame is modified. Just modify input frame directly instead! 
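For the default path (apply_resolution=False), the sky model above is just an inverse-variance weighted mean over the sky fibers, replicated to every spectrum of the frame. A self-contained sketch of that branch (function name is mine):

    import numpy as np

    def weighted_mean_sky(skyfluxes, skyivars, nspec):
        # ivar-weighted mean sky spectrum over the sky fibers
        meanskyflux = np.average(skyfluxes, axis=0, weights=skyivars)
        wtot = skyivars.sum(axis=0)
        werr2 = (skyivars**2 * (skyfluxes - meanskyflux)**2).sum(axis=0)
        werr = np.sqrt(werr2) / wtot
        meanskyivar = 1.0 / werr**2
        # replicate the 1D model to a (nspec, nwave) sky model
        nwave = skyfluxes.shape[1]
        finalflux = np.tile(meanskyflux, nspec).reshape(nspec, nwave)
        finalivar = np.tile(meanskyivar, nspec).reshape(nspec, nwave)
        return finalflux, finalivar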
- - fframe.flux= fframe.flux-skymodel.flux - fframe.ivar = util.combine_ivar(fframe.ivar.clip(1e-8), skymodel.ivar.clip(1e-8)) - fframe.mask = fframe.mask | skymodel.mask - #- create a frame object now - #sframe=fr.Frame(fframe.wave,sflux,sivar,smask,fframe.resolution_data,meta=fframe.meta,fibermap=fframe.fibermap) - return fframe - diff --git a/deprecated/py/desispec/scripts/daily_processing.py b/deprecated/py/desispec/scripts/daily_processing.py deleted file mode 100644 index bd2408f77..000000000 --- a/deprecated/py/desispec/scripts/daily_processing.py +++ /dev/null @@ -1,523 +0,0 @@ -""" -desispec.scripts.daily_processing -================================= - -""" -import numpy as np -import os -import sys -import time -from astropy.table import Table -import glob - -## Import some helper functions, you can see their definitions by uncomenting the bash shell command -from desispec.workflow.tableio import load_tables, write_tables, write_table -from desispec.workflow.utils import verify_variable_with_environment, pathjoin, listpath, \ - get_printable_banner, sleep_and_report -from desispec.workflow.timing import during_operating_hours, what_night_is_it, wait_for_cals -from desispec.workflow.exptable import default_obstypes_for_exptable, get_exposure_table_column_defs, \ - get_exposure_table_path, get_exposure_table_name, summarize_exposure -from desispec.workflow.proctable import default_obstypes_for_proctable, \ - get_processing_table_path, \ - get_processing_table_name, \ - erow_to_prow, default_prow -from desispec.workflow.processing import parse_previous_tables, flat_joint_fit, arc_joint_fit, get_type_and_tile, \ - science_joint_fit, define_and_assign_dependency, create_and_submit, \ - update_and_recursively_submit, checkfor_and_submit_joint_job, \ - submit_tilenight_and_redshifts -from desispec.workflow.queue import update_from_queue, any_jobs_not_complete -from desispec.io.util import difference_camwords, parse_badamps, validate_badamps - -def daily_processing_manager(specprod=None, exp_table_path=None, proc_table_path=None, path_to_data=None, - expobstypes=None, procobstypes=None, z_submit_types=None, camword=None, badcamword=None, - badamps=None, override_night=None, tab_filetype='csv', queue='realtime', - exps_to_ignore=None, data_cadence_time=300, queue_cadence_time=1800, - exp_cadence_time=2, - dry_run_level=0, dry_run=False, no_redshifts=False, continue_looping_debug=False, dont_check_job_outputs=False, - dont_resubmit_partial_jobs=False, verbose=False, use_specter=False, use_tilenight=False): - """ - Generates processing tables for the nights requested. Requires exposure tables to exist on disk. - - Args: - specprod: str. The name of the current production. If used, this will overwrite the SPECPROD environment variable. - exp_table_path: str. Full path to where to exposure tables are stored, WITHOUT the monthly directory included. - proc_table_path: str. Full path to where to processing tables to be written. - path_to_data: str. Path to the raw data. - expobstypes: str or comma separated list of strings. The exposure OBSTYPE's that you want to include in the exposure table. - procobstypes: str or comma separated list of strings. The exposure OBSTYPE's that you want to include in the processing table. - z_submit_types: list of str's or comma separated list of string. The "group" types of redshifts that should be - submitted with each exposure. If not specified, default for daily processing is - ['cumulative', 'pernight-v0']. 
If false, 'false', or [], then no redshifts are submitted. - camword: str. Camword that, if set, alters the set of cameras that will be set for processing. - Examples: a0123456789, a1, a2b3r3, a2b3r4z3. - badcamword: str. Camword that, if set, will be removed from the camword defined in camword if given, or the camword - inferred from the data if camword is not given. - badamps: str. Comma seperated list of bad amplifiers that should not be processed. Should be of the - form "{camera}{petal}{amp}", i.e. "[brz][0-9][ABCD]". Example: 'b7D,z8A' - override_night: str or int. 8 digit night, e.g. 20200314, of data to run on. If None, it runs on the current night. - tab_filetype: str. The file extension (without the '.') of the exposure and processing tables. - queue: str. The name of the queue to submit the jobs to. Default is "realtime". - exps_to_ignore: list. A list of exposure id's that should not be processed. Each should be an integer. - data_cadence_time: int. Wait time in seconds between loops in looking for new data. Default is 30 seconds. - queue_cadence_time: int. Wait time in seconds between loops in checking queue statuses and resubmitting failures. Default is 1800s. - exp_cadence_time: int. Wait time in seconds between loops over each science exposure. Default 2. - dry_run_level, int, If nonzero, this is a simulated run. If dry_run=1 the scripts will be written or submitted. If - dry_run=2, the scripts will not be writter or submitted. Logging will remain the same - for testing as though scripts are being submitted. Default is 0 (false). - dry_run, bool. When to run without submitting scripts or not. If dry_run_level is defined, then it over-rides - this flag. dry_run_level not set and dry_run=True, dry_run_level is set to 2 (no scripts - generated or run). Default for dry_run is False. - no_redshifts, bool. Whether to submit redshifts or not. If True, redshifts are not submitted. - continue_looping_debug: bool. FOR DEBUG purposes only. Will continue looping in search of new data until the process - is terminated. Default is False. - dont_check_job_outputs, bool. Default is False. If False, the code checks for the existence of the expected final - data products for the script being submitted. If all files exist and this is False, - then the script will not be submitted. If some files exist and this is False, only the - subset of the cameras without the final data products will be generated and submitted. - dont_resubmit_partial_jobs, bool. Default is False. Must be used with dont_check_job_outputs=False. If this flag is - False, jobs with some prior data are pruned using PROCCAMWORD to only process the - remaining cameras not found to exist. - verbose: bool. True if you want more verbose output, false otherwise. Current not propagated to lower code, - so it is only used in the main daily_processing script itself. - use_specter, bool, optional. Default is False. If True, use specter, otherwise use gpu_specter by default. - use_tilenight (bool, optional): Default is False. If True, use desi_proc_tilenight for prestdstar, stdstar, - and poststdstar steps for science exposures. - - Returns: Nothing - - Notes: - Generates both exposure table and processing tables 'on the fly' and saves them at various checkpoints. These - should be capable of being reloaded in case of interuption or accidental termination of the manager's process. - """ - ## If not being done during operating hours, and we're not simulating data or running a catchup run, exit. 
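The z_submit_types argument documented above accepts None, the strings 'false'/'none', or a comma-separated list of redshift group types, and is normalized before use. A compact sketch of that logic (function name is mine; the valid set is taken from the code below):

    def parse_z_submit_types(value):
        # None or a 'false'/'none' string disables redshift submission
        if value is None or value.lower() in ('false', 'none'):
            return None
        ztypes = [z.strip().lower() for z in value.split(',')]
        for z in ztypes:
            if z not in ('cumulative', 'pernight-v0', 'pernight', 'perexp'):
                raise ValueError(f"Couldn't understand ztype={z}")
        return ztypes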
- if not during_operating_hours(dry_run=dry_run) and override_night is None: - print("Not during operating hours, and not asked to perform a dry run or run on historic data. Exiting.") - sys.exit(0) - - ## What night are we running on? - true_night = what_night_is_it() - if override_night is not None: - night = int(override_night) - print(f"True night is {true_night}, but running for night={night}") - else: - night = true_night - - ## Wait for calibrations to completely arrive before proceeding, - ## since we need to measure CTE from flats first - found_cals = wait_for_cals(night) - if not found_cals: - print(f"ERROR: didn't find calibration data for {night}") - sys.exit(1) - - if continue_looping_debug: - print("continue_looping_debug is set. Will continue looking for new data and needs to be terminated by the user.") - - ## Recast booleans from double negative - check_for_outputs = (not dont_check_job_outputs) - resubmit_partial_complete = (not dont_resubmit_partial_jobs) - - ## Define the obstypes to process - if procobstypes is None: - procobstypes = default_obstypes_for_proctable() - elif isinstance(procobstypes, str): - procobstypes = procobstypes.split(',') - - ## Define the obstypes to save information for in the exposure table - if expobstypes is None: - expobstypes = default_obstypes_for_exptable() - elif isinstance(expobstypes, str): - expobstypes = expobstypes.split(',') - - ## Define the group types of redshifts you want to generate for each tile - if no_redshifts: - z_submit_types = None - else: - if z_submit_types is None: - pass - elif isinstance(z_submit_types, str): - if z_submit_types.lower() == 'false': - z_submit_types = None - elif z_submit_types.lower() == 'none': - z_submit_types = None - else: - z_submit_types = [ztype.strip().lower() for ztype in z_submit_types.split(',')] - for ztype in z_submit_types: - if ztype not in ['cumulative', 'pernight-v0', 'pernight', 'perexp']: - raise ValueError(f"Couldn't understand ztype={ztype} in z_submit_types={z_submit_types}.") - else: - raise ValueError(f"Couldn't understand z_submit_types={z_submit_types}, type={type(z_submit_types)}.") - - if z_submit_types is None: - print("Not submitting scripts for redshift fitting") - else: - print(f"Redshift fitting with redshift group types: {z_submit_types}") - - ## Reconcile the dry_run and dry_run_level - if dry_run and dry_run_level == 0: - dry_run_level = 2 - elif dry_run_level > 0: - dry_run = True - - ## expobstypes must contain all the types used in processing - for typ in procobstypes: - if typ not in expobstypes: - expobstypes.append(typ) - - ## Warn people if changing camword - finalcamword = 'a0123456789' - if camword is not None and badcamword is None: - badcamword = difference_camwords(finalcamword,camword) - finalcamword = camword - elif camword is not None and badcamword is not None: - finalcamword = difference_camwords(camword, badcamword) - badcamword = difference_camwords('a0123456789', finalcamword) - elif badcamword is not None: - finalcamword = difference_camwords(finalcamword,badcamword) - else: - badcamword = '' - - if badcamword != '': - ## Inform the user what will be done with it. - print(f"Modifying camword of data to be processed with badcamword: {badcamword}. 
"+\ - f"Camword to be processed: {finalcamword}") - - ## Make sure badamps is formatted properly - if badamps is None: - badamps = '' - else: - badamps = validate_badamps(badamps) - - ## Define the set of exposures to ignore - if exps_to_ignore is None: - exps_to_ignore = set() - else: - exps_to_ignore = np.sort(np.array(exps_to_ignore).astype(int)) - print(f"\nReceived exposures to ignore: {exps_to_ignore}") - exps_to_ignore = set(exps_to_ignore) - - ## Get context specific variable values - colnames, coltypes, coldefaults = get_exposure_table_column_defs(return_default_values=True) - - ## Define where to find the data - path_to_data = verify_variable_with_environment(var=path_to_data,var_name='path_to_data', env_name='DESI_SPECTRO_DATA') - specprod = verify_variable_with_environment(var=specprod,var_name='specprod',env_name='SPECPROD') - - ## Define the naming scheme for the raw data - ## Manifests (describing end of cals, etc.) don't have a data file, so search for those separately - data_glob = os.path.join(path_to_data, str(night), '*', 'desi-*.fit*') - manifest_glob = os.path.join(path_to_data, str(night), '*', 'manifest_*.json') - - ## Determine where the exposure table will be written - if exp_table_path is None: - exp_table_path = get_exposure_table_path(night=night, usespecprod=True) - os.makedirs(exp_table_path, exist_ok=True) - name = get_exposure_table_name(night=night, extension=tab_filetype) - exp_table_pathname = pathjoin(exp_table_path, name) - - ## Determine where the processing table will be written - if proc_table_path is None: - proc_table_path = get_processing_table_path() - os.makedirs(proc_table_path, exist_ok=True) - name = get_processing_table_name(prodmod=night, extension=tab_filetype) - proc_table_pathname = pathjoin(proc_table_path, name) - - ## Determine where the unprocessed data table will be written - unproc_table_pathname = pathjoin(proc_table_path,name.replace('processing', 'unprocessed')) - - ## Combine the table names and types for easier passing to io functions - table_pathnames = [exp_table_pathname, proc_table_pathname, unproc_table_pathname] - table_types = ['exptable','proctable','unproctable'] - - ## Load in the files defined above - etable, ptable, unproc_table = load_tables(tablenames=table_pathnames, \ - tabletypes=table_types) - - ## Get relevant data from the tables - all_exps = set(etable['EXPID']) - arcs, flats, sciences, calibjobs, curtype, lasttype, \ - curtile, lasttile, internal_id = parse_previous_tables(etable, ptable, night) - do_bias = ('bias' in procobstypes or 'dark' in procobstypes) - - ## While running on the proper night and during night hours, - ## or doing a dry_run or override_night, keep looping - while ( (night == what_night_is_it()) and during_operating_hours(dry_run=dry_run) ) or ( override_night is not None ): - ## Get a list of new exposures that have been found - print(f"\n\n\nPreviously known exposures: {all_exps}") - data_exps = set(sorted([int(os.path.basename(os.path.dirname(fil))) for fil in glob.glob(data_glob)])) - manifest_exps = set(sorted([int(os.path.basename(os.path.dirname(fil))) for fil in glob.glob(manifest_glob)])) - located_exps = data_exps.union(manifest_exps) - - new_exps = located_exps.difference(all_exps) - all_exps = located_exps # i.e. 
new_exps.union(all_exps) - print(f"\nNew exposures: {new_exps}\n\n") - - ## If there aren't any new exps and there won't be more because we're running on an old night or simulating things, exit - if (not continue_looping_debug) and ( override_night is not None ) and ( len(list(new_exps))==0 ): - print("Terminating the search for new exposures because no new exposures are present and you have" + \ - " override_night set without continue_looping_debug") - break - - ## Loop over new exposures and process them as relevant to that type - for exp in sorted(list(new_exps)): - if verbose: - print(get_printable_banner(str(exp))) - else: - print(f'\n\n##################### {exp} #########################') - - ## Open relevant raw data files to understand what we're dealing with - erow = summarize_exposure(path_to_data, night, exp, expobstypes, colnames, coldefaults, verbosely=False) - - ## If there was an issue, continue. If it's a string summarizing the end of some sequence, use that info. - ## If the exposure is assosciated with data, process that data. - if erow is None: - continue - elif type(erow) is str: - writeout = False - if exp in exps_to_ignore: - print(f"Located {erow} in exposure {exp}, but the exposure was listed in the expids to ignore. Ignoring this.") - elif erow == 'endofarcs' and calibjobs['psfnight'] is None and 'arc' in procobstypes: - print("\nLocated end of arc calibration sequence flag. Processing psfnight.\n") - ptable, calibjobs['psfnight'], internal_id = arc_joint_fit(ptable, arcs, internal_id, dry_run=dry_run_level, queue=queue) - writeout = True - elif erow == 'endofflats' and calibjobs['nightlyflat'] is None and 'flat' in procobstypes: - print("\nLocated end of long flat calibration sequence flag. Processing nightlyflat.\n") - ptable, calibjobs['nightlyflat'], internal_id = flat_joint_fit(ptable, flats, internal_id, dry_run=dry_run_level, queue=queue) - writeout = True - elif 'short' in erow and calibjobs['nightlyflat'] is None: - print("\nLocated end of short flat calibration flag. Removing flats from list for nightlyflat processing.\n") - flats = [] - if writeout and dry_run_level < 3: - write_tables([ptable], tablenames=[proc_table_pathname]) - sleep_and_report(2, message_suffix=f"after joint fit", dry_run=dry_run) - del writeout - continue - else: - ## Else it's a real row so start processing it - pass - - erow['BADCAMWORD'] = badcamword - erow['BADAMPS'] = badamps - unproc = False - if exp in exps_to_ignore: - print(f"\n{exp} given as exposure id to ignore. Not processing.") - erow['LASTSTEP'] = 'ignore' - # erow['EXPFLAG'] = np.append(erow['EXPFLAG'], ) - unproc = True - elif erow['LASTSTEP'] == 'ignore': - print(f"\n{exp} identified by the pipeline as something to ignore. Not processing.") - unproc = True - elif erow['OBSTYPE'] not in procobstypes: - print(f"\n{erow['OBSTYPE']} not in obstypes to process: {procobstypes}. Not processing.") - unproc = True - elif str(erow['OBSTYPE']).lower() == 'arc' and float(erow['EXPTIME']) > 8.0: - print("\nArc exposure with EXPTIME greater than 8s. Not processing.") - unproc = True - elif str(erow['OBSTYPE']).lower() == 'dark' and np.abs(float(erow['EXPTIME'])-300.) > 1: - print("\nDark exposure with EXPTIME not consistent with 300s. Not processing.") - unproc = True - elif str(erow['OBSTYPE']).lower() == 'dark' and calibjobs['ccdcalib'] is not None: - print("\nDark exposure found, but already proocessed dark with" + - f" expID {calibjobs['ccdcalib']['EXPID']}. 
Skipping this one.") - unproc = True - - print(f"\nFound: {erow}") - etable.add_row(erow) - if unproc: - unproc_table.add_row(erow) - sleep_and_report(2, message_suffix=f"after exposure", dry_run=dry_run) - if dry_run_level < 3: - write_tables([etable, unproc_table], tablenames=[exp_table_pathname, unproc_table_pathname]) - continue - - curtype,curtile = get_type_and_tile(erow) - - if lasttype is None and curtype != 'dark' and do_bias: - print("\nNo dark found at the beginning of the night." - + "Submitting nightlybias before processing exposures.\n") - prow = default_prow() - prow['INTID'] = internal_id - prow['OBSTYPE'] = 'zero' - internal_id += 1 - prow['JOBDESC'] = 'nightlybias' - prow['NIGHT'] = night - prow['CALIBRATOR'] = 1 - prow['PROCCAMWORD'] = finalcamword - prow = create_and_submit(prow, dry_run=dry_run_level, - queue=queue, - strictly_successful=True, - check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete) - calibjobs['nightlybias'] = prow.copy() - ## Add the processing row to the processing table - ptable.add_row(prow) - ## Write out the processing table - if dry_run_level < 3: - write_tables([ptable], tablenames=[proc_table_pathname]) - sleep_and_report(2, message_suffix=f"after nightlybias", - dry_run=dry_run) - - # if this is a new tile/obstype, proceed with submitting all of the jobs for the previous tile - if lasttype is not None and ((curtype != lasttype) or (curtile != lasttile)): - print("\nData for previous tile or obstype is complete. Running joint fits. " - + f"{curtype=}, {lasttype=}, {curtile=}, {lasttile=}\n") - old_iid = internal_id - # If done with science exposures for a tile and use_tilenight==True, use - # submit_tilenight_and_redshifts, otherwise use checkfor_and_submit_joint_job - if use_tilenight and lasttype == 'science' and len(sciences)>0: - extra_job_args = {} - extra_job_args['z_submit_types'] = z_submit_types - extra_job_args['laststeps'] = ['all','fluxcalib','skysub'] - ptable, sciences, internal_id \ - = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, internal_id, - dry_run=dry_run_level, - queue=queue, - strictly_successful=True, - check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete, - use_specter=use_specter, extra_job_args=extra_job_args) - else: - ptable, calibjobs, sciences, internal_id \ - = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences, calibjobs, - lasttype, internal_id, - dry_run=dry_run_level, - queue=queue, - strictly_successful=True, - check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete, - z_submit_types=z_submit_types) - - ## if internal_id changed that means we submitted a joint job - ## so lets write that out and pause - if (internal_id > old_iid) and (dry_run_level < 3): - write_tables([ptable], tablenames=[proc_table_pathname]) - sleep_and_report(2, message_suffix=f"after joint fit", dry_run=dry_run) - del old_iid - - prow = erow_to_prow(erow) - prow['INTID'] = internal_id - internal_id += 1 - if prow['OBSTYPE'] == 'dark': - prow['JOBDESC'] = 'ccdcalib' - else: - prow['JOBDESC'] = prow['OBSTYPE'] - prow = define_and_assign_dependency(prow, calibjobs) - if (not use_tilenight) or erow['OBSTYPE'] != 'science': - print(f"\nProcessing: {prow}\n") - prow = create_and_submit(prow, dry_run=dry_run_level, queue=queue, - strictly_successful=True, check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete,use_specter=use_specter) - - ## If processed a dark, assign that 
to the dark job - if curtype == 'dark': - prow['CALIBRATOR'] = 1 - calibjobs['ccdcalib'] = prow.copy() - - ## Add the processing row to the processing table - ptable.add_row(prow) - - ## Note: Assumption here on number of flats - if curtype == 'flat' and calibjobs['nightlyflat'] is None \ - and int(erow['SEQTOT']) < 5 \ - and np.abs(float(erow['EXPTIME'])-120.) < 1.: - flats.append(prow) - elif curtype == 'arc' and calibjobs['psfnight'] is None: - arcs.append(prow) - elif curtype == 'science' and (use_tilenight or prow['LASTSTEP'] != 'skysub'): - sciences.append(prow) - - lasttile = curtile - lasttype = curtype - - ## Flush the outputs - sys.stdout.flush() - sys.stderr.flush() - - if dry_run_level < 3: - write_tables([etable, ptable], tablenames=[exp_table_pathname, proc_table_pathname]) - sleep_and_report(exp_cadence_time, message_suffix=f"after exposure", dry_run=dry_run) - - print("\nReached the end of current iteration of new exposures.") - if override_night is not None and (not continue_looping_debug): - print("\nOverride_night set, not waiting for new data before exiting.\n") - else: - sleep_and_report(data_cadence_time, message_suffix=f"before looking for more new data", - dry_run=(dry_run and ())) - - if len(ptable) > 0: - ptable = update_from_queue(ptable, dry_run_level=dry_run_level) - # ptable, nsubmits = update_and_recursively_submit(ptable, - # ptab_name=proc_table_pathname, dry_run_level=dry_run_level) - - ## Exposure table doesn't change in the interim, so no need to re-write it to disk - if dry_run_level < 3: - write_table(ptable, tablename=proc_table_pathname) - if override_night is None or continue_looping_debug: - sleep_and_report(10, message_suffix=f"after updating queue information", dry_run=dry_run) - - ## Flush the outputs - sys.stdout.flush() - sys.stderr.flush() - ## No more data coming in, so do bottleneck steps if any apply - if use_tilenight and len(sciences)>0: - extra_job_args = {} - extra_job_args['z_submit_types'] = z_submit_types - extra_job_args['laststeps'] = ['all','fluxcalib','skysub'] - ptable, sciences, internal_id \ - = submit_tilenight_and_redshifts(ptable, sciences, calibjobs, internal_id, - dry_run=dry_run_level, - queue=queue, - strictly_successful=True, - check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete, - use_specter=use_specter, - extra_job_args=extra_job_args) - else: - ptable, calibjobs, sciences, internal_id \ - = checkfor_and_submit_joint_job(ptable, arcs, flats, sciences, calibjobs, - lasttype, internal_id, - dry_run=dry_run_level, queue=queue, - strictly_successful=True, - check_for_outputs=check_for_outputs, - resubmit_partial_complete=resubmit_partial_complete, - z_submit_types=z_submit_types) - ## All jobs now submitted, update information from job queue and save - ptable = update_from_queue(ptable, dry_run_level=dry_run_level) - if dry_run_level < 3: - write_table(ptable, tablename=proc_table_pathname) - - print(f"Completed submission of exposures for night {night}.") - - # ####################################### - # ########## Queue Cleanup ############## - # ####################################### - # print("Now resolving job failures.") - # - # ## Flush the outputs - # sys.stdout.flush() - # sys.stderr.flush() - # ## Now we resubmit failed jobs and their dependencies until all jobs have un-submittable end state - # ## e.g. 
they either succeeded or failed with a code-related issue - # ii,nsubmits = 0, 0 - # while ii < 4 and any_jobs_not_complete(ptable['STATUS']): - # print(f"Starting iteration {ii} of queue updating and resubmissions of failures.") - # ptable, nsubmits = update_and_recursively_submit(ptable, submits=nsubmits, - # ptab_name=proc_table_pathname, dry_run=dry_run_level) - # if dry_run_level < 3: - # write_table(ptable, tablename=proc_table_pathname) - # if any_jobs_not_complete(ptable['STATUS']): - # sleep_and_report(queue_cadence_time, message_suffix=f"after resubmitting job to queue", - # dry_run=(dry_run and (override_night is not None) and not (continue_looping_debug))) - # - # ptable = update_from_queue(ptable, dry_run=dry_run_level) - # if dry_run_level < 3: - # write_table(ptable, tablename=proc_table_pathname) - # ## Flush the outputs - # sys.stdout.flush() - # sys.stderr.flush() - # ii += 1 - # - # print("No job failures left.") - print("Exiting") - ## Flush the outputs - sys.stdout.flush() - sys.stderr.flush() diff --git a/deprecated/py/desispec/scripts/night.py b/deprecated/py/desispec/scripts/night.py deleted file mode 100644 index e82f11fcc..000000000 --- a/deprecated/py/desispec/scripts/night.py +++ /dev/null @@ -1,563 +0,0 @@ -# Licensed under a 3-clause BSD style license - see LICENSE.rst -# -*- coding: utf-8 -*- -""" -desispec.scripts.night -====================== - -Automated nightly processing. -""" -from __future__ import (absolute_import, division, print_function, - unicode_literals) - -import sys -import os -import re -import copy -import argparse -import subprocess as sp -import time -import warnings - -import fitsio - -from desiutil.log import get_logger - -from .. import io - -from .. import pipeline as pipe - -from ..pipeline import control as control - - -errs = { - "usage" : 1, - "pipefail" : 2, - "io" : 3 -} - - -class Nightly(object): - - def __init__(self): - self.log = get_logger() - - parser = argparse.ArgumentParser( - description="DESI nightly processing", - usage="""desi_night [options] - -Where supported commands are: - update Process an incoming exposure as much as possible. Arc exposures - will trigger PSF estimation. If the nightly PSF exists, then a - flat exposure will be extracted and a fiberflat will be created. - If the nightly PSF exists, then a science exposure will be - extracted. If the nightly fiberflat exists, then a science - exposure will be calibrated. - arcs All arcs are done, proceed with nightly PSF. - flats All flats are done, proceed with nightly fiberflat. - redshifts Regroup spectra and process all updated redshifts. -""") - parser.add_argument("command", help="Subcommand to run") - # parse_args defaults to [1:] for args, but you need to - # exclude the rest of the args too, or validation will fail - args = parser.parse_args(sys.argv[1:2]) - if not hasattr(self, args.command): - print("Unrecognized command") - parser.print_help() - sys.exit(errs["usage"]) - - # use dispatch pattern to invoke method with same name - getattr(self, args.command)() - - - def _update_db(self, night): - control.update(nightstr=night) - return - - - def _exposure_flavor(self, db, night, expid=None): - # What exposures are we using? 
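desi_night above uses a common argparse dispatch pattern: parse only the subcommand from argv, then invoke the method of the same name, leaving each subcommand to parse its own options. A minimal self-contained sketch:

    import argparse
    import sys

    class Tool:
        # Sketch of the subcommand dispatch used by the Nightly class above
        def __init__(self):
            parser = argparse.ArgumentParser(usage="tool <command> [options]")
            parser.add_argument("command", help="Subcommand to run")
            # parse only argv[1:2] so the subcommand can parse the rest itself
            args = parser.parse_args(sys.argv[1:2])
            if not hasattr(self, args.command):
                parser.print_help()
                sys.exit(1)
            getattr(self, args.command)()   # call the method of the same name

        def update(self):
            print("processing new exposures")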
- exp_by_flavor = dict() - for flv in ["arc", "flat", "science"]: - exp_by_flavor[flv] = list() - - if expid is not None: - # Only use this one - with db.cursor() as cur: - cmd = "select expid, flavor from fibermap where night = {} and expid = {}".format(night, expid) - cur.execute(cmd) - for result in cur.fetchall(): - exp_by_flavor[result[1]].append(result[0]) - else: - # Get them all - with db.cursor() as cur: - cmd = "select expid, flavor from fibermap where night = {}"\ - .format(night) - cur.execute(cmd) - for result in cur.fetchall(): - exp_by_flavor[result[1]].append(result[0]) - return exp_by_flavor - - - def _select_exposures(self, db, night, table, expid=None): - # Get the state and submitted status for all selected tasks. - # Technically this is redundant since the jobs run by desi_pipe chain - # will do the same checks- we are just avoiding calls that are not - # needed. - cmd = "select name, expid, state, submitted from {} where night = {}".format(table, night) - if expid is not None: - # Only use this one exposure - cmd = "{} and expid = {}".format(cmd, expid) - - exps = set() - with db.cursor() as cur: - cur.execute(cmd) - for result in cur.fetchall(): - if (pipe.task_int_to_state[result[2]] != "done") and \ - (result[3] != 1): - if result[1] not in exps: - self.log.info("found unprocessed {} exposure {}".format(table, result[1])) - exps.add(result[1]) - return list(sorted(exps)) - - - def _write_jobid(self, root, night, expid, jobid): - outdir = os.path.join(io.specprod_root(), - io.get_pipe_rundir(), - io.get_pipe_scriptdir(), - io.get_pipe_nightdir(), - night) - fname = "{}_{:08d}_{}".format(root, expid, jobid) - outfile = os.path.join(outdir, fname) - with open(outfile, "w") as f: - f.write(time.ctime()) - f.write("\n") - return - - - def _read_jobids(self, ttype, night): - outdir = os.path.join(io.specprod_root(), - io.get_pipe_rundir(), - io.get_pipe_scriptdir(), - io.get_pipe_nightdir(), - night) - pat = re.compile(r"{}_(\d{{8}})_(.*)".format(ttype)) - jobids = list() - for root, dirs, files in os.walk(outdir, topdown=True): - for f in files: - mat = pat.match(f) - if mat is not None: - jobids.append(mat.group(2)) - break - return jobids - - - def _small(self, args): - small = copy.copy(args) - if small.nersc_maxnodes_small is not None: - small.nersc_maxnodes = small.nersc_maxnodes_small - return small - - - def _run_chain(self, args, exps, db, night, tasktypes, deps=None, - spec=None): - log = get_logger() - jobids = list() - if exps is None: - log.info("Running chain for night = {}, tasktypes = {}, " - "deps = {}".format(night, tasktypes, deps)) - jobids = control.chain( - tasktypes, - nightstr=night, - spec=spec, - pack=True, - nosubmitted=True, - depjobs=deps, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=os.path.join("night", night), - debug=False) - log.debug('Job IDs {}'.format(jobids)) - else: - for ex in exps: - log.info("Running chain for night = {}, tasktypes = {}, " - "expid = {}, deps = {}".format(night, tasktypes, ex, deps)) - exjobids = control.chain( - tasktypes, - nightstr=night, - expid=ex, - spec=spec, - pack=True, - nosubmitted=True, - depjobs=deps, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - 
mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=os.path.join("night", night), - debug=False) - log.debug('Job IDs {}'.format(exjobids)) - jobids.extend(exjobids) - return jobids - - - def _run_psf(self, args, db, night, expid=None, spec=None): - exps = self._select_exposures(db, night, "psf", expid=expid) - return self._run_chain(args, exps, db, night, ["preproc", "psf"], - spec=spec) - - - def _run_extract(self, args, exp_by_flavor, db, night, expid=None, - deps=None, spec=None): - exps = self._select_exposures(db, night, "extract", expid=expid) - jobids = list() - for ex in exps: - jids = None - # Regardless of exposure type, preprocess and traceshift in a - # single job. - trids = self._run_chain(self._small(args), [ex], db, night, - ["preproc", "traceshift"], deps=deps, spec=spec) - # Now either extract or also do fiberflat. - if ex in exp_by_flavor["flat"]: - jids = self._run_chain(args, [ex], db, night, - ["extract", "fiberflat"], deps=trids, spec=spec) - else: - jids = self._run_chain(args, [ex], db, night, - ["extract"], deps=trids, spec=spec) - jobids.extend(jids) - return jobids - - - def _run_calib(self, args, db, night, expid=None, deps=None, spec=None): - exps = self._select_exposures(db, night, "cframe", expid=expid) - # Swap in the modified args for "small" jobs - return self._run_chain(self._small(args), exps, db, night, - ["sky", "starfit", "fluxcalib", "cframe"], deps=deps, spec=spec) - - - def _check_nightly(self, ttype, db, night): - ready = True - deps = None - tnight = "{}night".format(ttype) - cmd = "select name, state, submitted from {} where night = {}".format(tnight, night) - with db.cursor() as cur: - cur.execute(cmd) - for result in cur.fetchall(): - if pipe.task_int_to_state[result[1]] != "done": - ready = False - if not ready: - # Did we already submit a job? - nids = self._read_jobids(tnight, night) - if len(nids) > 0: - ready = True - deps = nids - return ready, deps - - - def _check_nersc_host(self, args): - """Modify the --nersc argument based on the environment. - """ - if args.shell: - # We are forcibly generating shell scripts. - args.nersc = None - else: - if args.nersc is None: - if "NERSC_HOST" in os.environ: - if os.environ["NERSC_HOST"] == "cori": - args.nersc = "cori-haswell" - else: - args.nersc = os.environ["NERSC_HOST"] - return - - - def _pipe_opts(self, parser): - """Internal function to parse options passed to desi_night. - """ - parser.add_argument("--nersc", required=False, default=None, - help="write a script for this NERSC system (cori-haswell " - "| cori-knl). Default uses $NERSC_HOST") - - parser.add_argument("--shell", required=False, default=False, - action="store_true", - help="generate normal bash scripts, even if run on a NERSC system") - - parser.add_argument("--nersc_queue", required=False, default="regular", - help="write a script for this NERSC queue (debug | regular)") - - parser.add_argument("--nersc_queue_redshifts", required=False, - default=None, help="Use this NERSC queue for redshifts. " - "Defaults to same as --nersc_queue.") - - parser.add_argument("--nersc_maxtime", required=False, type=int, - default=0, help="Then maximum run time (in minutes) for a single " - " job. If the list of tasks cannot be run in this time, multiple " - " job scripts will be written. Default is the maximum time for " - " the specified queue.") - - parser.add_argument("--nersc_maxnodes", required=False, type=int, - default=0, help="The maximum number of nodes to use. 
Default " - " is the maximum nodes for the specified queue.") - - parser.add_argument("--nersc_maxnodes_small", required=False, type=int, - default=0, help="The maximum number of nodes to use for 'small' " - "steps like the per-night psf and fiberflat. Default is to use the" - " same value as --nersc_maxnodes.") - - parser.add_argument("--nersc_maxnodes_redshifts", required=False, - type=int, default=0, help="The maximum number of nodes to use for " - " redshifts. Default is to use --nersc_maxnodes.") - - parser.add_argument("--nersc_shifter", required=False, default=None, - help="The shifter image to use for NERSC jobs") - - parser.add_argument("--mpi_procs", required=False, type=int, default=1, - help="The number of MPI processes to use for non-NERSC shell " - "scripts (default 1)") - - parser.add_argument("--mpi_run", required=False, type=str, default="", - help="The command to launch MPI programs " - "for non-NERSC shell scripts (default do not use MPI)") - - parser.add_argument("--procs_per_node", required=False, type=int, - default=0, help="The number of processes to use per node. If not " - "specified it uses a default value for each machine.") - - parser.add_argument("--debug", required=False, default=False, - action="store_true", help="debugging messages in job logs") - - return parser - - - def update(self): - parser = argparse.ArgumentParser(description="Run processing on " - "new data", usage="desi_night update [options] (use --help for " - "details)") - - parser.add_argument("--night", required=True, default=None, - help="The night in YYYYMMDD format.") - - parser.add_argument("--expid", required=False, type=int, default=-1, - help="Only process this exposure.") - - parser.add_argument("--spec", required=False, type=int, default=-1, - help="Only process a single spectrograph. (FOR DEBUGGING ONLY)") - - parser = self._pipe_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - # First update the DB - self._update_db(args.night) - - # Now load the DB - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - # Get our exposures to consider and their flavors - expid = None - if args.expid >= 0: - expid = args.expid - - spec = None - if args.spec >= 0: - spec = args.spec - - expid_by_flavor = self._exposure_flavor(db, args.night, expid=expid) - - # If there are any arcs, we always process them - for ex in expid_by_flavor["arc"]: - jobids = self._run_psf(args, db, args.night, expid=ex, spec=spec) - #FIXME: once we have a job table in the DB, the job ID will - # be recorded automatically. Until then, we record the PSF job - # IDs in some file names so that the psfnight job can get the - # dependencies correct. - for jid in jobids: - self._write_jobid("psf", args.night, ex, jid) - - # Check whether psfnight tasks are done or submitted - ntpsfready, ntpsfdeps = self._check_nightly("psf", db, args.night) - - # Check whether fiberflatnight tasks are done or submitted - ntflatready, ntflatdeps = self._check_nightly("fiberflat", db, args.night) - - if ntpsfready: - # We can do extractions - for ex in expid_by_flavor["flat"]: - jobids = self._run_extract(args, expid_by_flavor, db, args.night, expid=ex, deps=ntpsfdeps, spec=spec) - #FIXME: once we have a job table in the DB, the job ID will - # be recorded automatically. Until then, we record the - # fiberflat job IDs in some file names so that the - # fiberflatnight job can get the dependencies correct. 
- for jid in jobids: - self._write_jobid("fiberflat", args.night, ex, jid) - - for ex in expid_by_flavor["science"]: - exids = self._run_extract(args, expid_by_flavor, db, args.night, expid=ex, deps=ntpsfdeps, spec=spec) - if ntflatready: - # We can submit calibration jobs too. - alldeps = None - if len(exids) > 0: - alldeps = list(exids) - if ntflatdeps is not None: - if alldeps is None: - alldeps = list(ntflatdeps) - else: - alldeps.extend(ntflatdeps) - calids = self._run_calib(args, db, args.night, expid=ex, - deps=alldeps) - for cid in calids: - self._write_jobid("cframe", args.night, ex, cid) - else: - allexp = [ "{}".format(x) for x in expid_by_flavor["science"] ] - msg = "Attempting to update processing of science exposures before the nightly fiberflat has been submitted. Calibration has been skipped for the following exposures: {} You should resubmit these exposures after running 'desi_night flats'".format(",".join(allexp)) - warnings.warn(msg, RuntimeWarning) - else: - if (len(expid_by_flavor["flat"]) > 0) or (len(expid_by_flavor["science"]) > 0): - allexp = [ "{}".format(x) for x in expid_by_flavor["science"] ] - allexp.extend([ "{}".format(x) for x in expid_by_flavor["flat"] ]) - msg = "Attempting to update processing with flats and/or science exposures before the nightly PSF has been submitted. The following exposures have been skipped: {} You should resubmit these exposures after running 'desi_night arcs'".format(",".join(allexp)) - warnings.warn(msg, RuntimeWarning) - - return - - - def arcs(self): - parser = argparse.ArgumentParser(description="Arcs are finished, " - "create nightly PSF", usage="desi_night arcs [options] (use " - "--help for details)") - - parser.add_argument("--night", required=True, default=None, - help="The night in YYYYMMDD format.") - - parser.add_argument("--spec", required=False, type=int, default=-1, - help="Only select tasks for a single spectrograph. (FOR DEBUGGING ONLY)") - - parser = self._pipe_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - spec = None - if args.spec >= 0: - spec = args.spec - - # First update the DB - self._update_db(args.night) - - # Now load the DB - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - # Check whether psfnight tasks are already done or submitted - ntpsfready, ntpsfdeps = self._check_nightly("psf", db, args.night) - - if ntpsfready: - if ntpsfdeps is None: - self.log.info("psfnight for {} already done".format(args.night)) - else: - self.log.info("psfnight for {} already submitted to queue (job = {})".format(args.night, ntpsfdeps)) - else: - # Safe to run. Get the job IDs of any previous psf tasks. - psfjobs = self._read_jobids("psf", args.night) - deps = None - if len(psfjobs) > 0: - deps = psfjobs - jid = self._run_chain(self._small(args), None, db, args.night, - ["psfnight"], deps=deps, spec=spec) - self._write_jobid("psfnight", args.night, 0, jid[0]) - return - - - def flats(self): - parser = argparse.ArgumentParser(description="Flats are finished, " - "create nightly fiberflat", usage="desi_night flats [options] (use " - "--help for details)") - - parser.add_argument("--night", required=True, default=None, - help="The night in YYYYMMDD format.") - - parser.add_argument("--spec", required=False, type=int, default=-1, - help="Only select tasks for a single spectrograph. 
(FOR DEBUGGING ONLY)") - - parser = self._pipe_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - spec = None - if args.spec >= 0: - spec = args.spec - - # First update the DB - self._update_db(args.night) - - # Now load the DB - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - # Check whether psfnight tasks are already done or submitted - ntflatready, ntflatdeps = self._check_nightly("fiberflat", db, args.night) - - if ntflatready: - if ntflatdeps is None: - self.log.info("fiberflatnight for {} already done".format(args.night)) - else: - self.log.info("fiberflatnight for {} already submitted to queue (job = {})".format(args.night, ntflatdeps)) - else: - # Safe to run. Get the job IDs of any previous fiberflat tasks. - flatjobs = self._read_jobids("fiberflat", args.night) - deps = None - if len(flatjobs) > 0: - deps = flatjobs - jid = self._run_chain(self._small(args), None, db, args.night, - ["fiberflatnight"], deps=deps, spec=spec) - self._write_jobid("fiberflatnight", args.night, 0, jid[0]) - return - - - def redshifts(self): - parser = argparse.ArgumentParser(description="Run spectra grouping and redshifts", usage="desi_night redshifts [options] (use " - "--help for details)") - - parser.add_argument("--night", required=True, default=None, - help="The night in YYYYMMDD format.") - - parser = self._pipe_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - # First update the DB - self._update_db(args.night) - - # Now load the DB - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - # Get the list of submitted cframe jobs. Use these as our dependencies. - cframejobs = self._read_jobids("cframe", args.night) - - # Run it - redargs = copy.copy(args) - if redargs.nersc_queue_redshifts is not None: - redargs.nersc_queue = redargs.nersc_queue_redshifts - if redargs.nersc_maxnodes_redshifts is not None: - redargs.nersc_maxnodes = redargs.nersc_maxnodes_redshifts - jid = self._run_chain(redargs, None, db, args.night, "spectra,redshift", - deps=cframejobs) - - return diff --git a/deprecated/py/desispec/scripts/pipe.py b/deprecated/py/desispec/scripts/pipe.py deleted file mode 100644 index 7156ab687..000000000 --- a/deprecated/py/desispec/scripts/pipe.py +++ /dev/null @@ -1,963 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.scripts.pipe -===================== - -Interactive control of the pipeline -""" - -from __future__ import absolute_import, division, print_function - -import sys -import os -import argparse -import re -import glob -from collections import OrderedDict - -import subprocess -import numpy as np - -from .. import io - -from desiutil.log import get_logger - -from .. import pipeline as pipe - -from ..pipeline import control as control - - -class PipeUI(object): - - def __init__(self): - self.pref = "DESI" - - parser = argparse.ArgumentParser( - description="DESI pipeline control", - usage="""desi_pipe [options] - -Where supported commands are (use desi_pipe --help for details): - (------- High-Level -------) - create Create a new production. - go Run a full production. - update Update an existing production. - top Live display of production database. - status Overview of production. - (------- Mid-Level --------) - chain Run all ready tasks for multiple pipeline steps. - cleanup Reset "running" (or optionally "failed") tasks back to "ready". 
- (------- Low-Level --------) - tasks Get all possible tasks for a given type and states. - check Check the status of tasks. - dryrun Return the equivalent command line entrypoint for tasks. - script Generate a shell or slurm script. - run Generate a script and run it. - getready Auto-Update of prod DB. - sync Synchronize DB state based on the filesystem. - env Print current production location. - query Direct sql query to the database. - -""") - parser.add_argument("command", help="Subcommand to run") - # parse_args defaults to [1:] for args, but you need to - # exclude the rest of the args too, or validation will fail - args = parser.parse_args(sys.argv[1:2]) - if not hasattr(self, args.command): - print("Unrecognized command") - parser.print_help() - sys.exit(1) - - # use dispatch pattern to invoke method with same name - getattr(self, args.command)() - - - def env(self): - rawdir = io.rawdata_root() - proddir = io.specprod_root() - print("{}{:<22} = {}{}{}".format( - self.pref, "Raw data directory", control.clr.OKBLUE, rawdir, - control.clr.ENDC) - ) - print("{}{:<22} = {}{}{}".format( - self.pref, "Production directory", control.clr.OKBLUE, proddir, - control.clr.ENDC) - ) - return - - def query(self): - parser = argparse.ArgumentParser(\ - description="Query the DB", - usage="desi_pipe query 'sql_command' [--rw] (use --help for details)") - parser.add_argument('cmd', metavar='cmd', type=str, - help="SQL command in quotes, like 'select * from preproc'") - parser.add_argument("--rw", action = "store_true", - help="read/write mode (use with care, experts only). Default is read only") - args = parser.parse_args(sys.argv[2:]) - dbpath = io.get_pipe_database() - if args.rw : - mode="w" - else : - mode="r" - db = pipe.load_db(dbpath, mode=mode) - with db.cursor() as cur: - cur.execute(args.cmd) - st = cur.fetchall() - for entry in st : - line="" - for prop in entry : - line += " {}".format(prop) - print(line) - - def create(self): - parser = argparse.ArgumentParser(\ - description="Create a new production", - usage="desi_pipe create [options] (use --help for details)") - - parser.add_argument("--root", required=False, default=None, - help="value to use for DESI_ROOT") - - parser.add_argument("--data", required=False, default=None, - help="value to use for DESI_SPECTRO_DATA") - - parser.add_argument("--redux", required=False, default=None, - help="value to use for DESI_SPECTRO_REDUX") - - parser.add_argument("--prod", required=False, default=None, - help="value to use for SPECPROD") - - parser.add_argument("--force", action = "store_true", - help="force DB creation even if prod exists on disk (useful for simulations") - - parser.add_argument("--basis", required=False, default=None, - help="value to use for DESI_BASIS_TEMPLATES") - - parser.add_argument("--calib", required=False, default=None, - help="value to use for DESI_SPECTRO_CALIB") - - parser.add_argument("--db-sqlite", required=False, default=False, - action="store_true", help="Use SQLite database backend.") - - parser.add_argument("--db-sqlite-path", type=str, required=False, - default=None, help="Override path to SQLite DB") - - parser.add_argument("--db-postgres", required=False, default=False, - action="store_true", help="Use PostgreSQL database backend. 
" - "You must correctly configure your ~/.pgpass file!") - - parser.add_argument("--db-postgres-host", type=str, required=False, - default="nerscdb03.nersc.gov", help="Set PostgreSQL hostname") - - parser.add_argument("--db-postgres-port", type=int, required=False, - default=5432, help="Set PostgreSQL port number") - - parser.add_argument("--db-postgres-name", type=str, required=False, - default="desidev", help="Set PostgreSQL DB name") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_admin", help="Set PostgreSQL user name") - - parser.add_argument("--db-postgres-authorized", type=str, - required=False, default="desidev_ro", - help="Additional PostgreSQL users / roles to authorize") - - parser.add_argument("--nside", required=False, type=int, default=64, - help="HEALPix nside value to use for spectral grouping.") - - args = parser.parse_args(sys.argv[2:]) - - control.create( - root=args.root, - data=args.data, - redux=args.redux, - prod=args.prod, - force=args.force, - basis=args.basis, - calib=args.calib, - db_sqlite=args.db_sqlite, - db_sqlite_path=args.db_sqlite_path, - db_postgres=args.db_postgres, - db_postgres_host=args.db_postgres_host, - db_postgres_port=args.db_postgres_port, - db_postgres_name=args.db_postgres_name, - db_postgres_user=args.db_postgres_user, - db_postgres_authorized=args.db_postgres_authorized, - nside=args.nside) - - return - - - def update(self): - parser = argparse.ArgumentParser(description="Update a production", - usage="desi_pipe update [options] (use --help for details)") - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be examined.") - - parser.add_argument("--nside", required=False, type=int, default=64, - help="HEALPix nside value to use for spectral grouping.") - - parser.add_argument("--expid", required=False, type=int, default=-1, - help="Only update the production for a single exposure ID.") - - args = parser.parse_args(sys.argv[2:]) - - expid = None - if args.expid >= 0: - expid = args.expid - - control.update(nightstr=args.nights, nside=args.nside, - expid=expid) - - return - - - def tasks(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(description="Get all tasks of a " - "particular type for one or more nights", - usage="desi_pipe tasks [options] (use --help for details)") - - parser.add_argument("--tasktypes", required=False, default=availtypes, - help="comma separated list of task types ({})".format(availtypes)) - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be examined.") - - parser.add_argument("--expid", required=False, type=int, default=-1, - help="Only select tasks for a single exposure ID.") - - parser.add_argument("--spec", required=False, type=int, default=-1, - help="Only select tasks for a single spectrograph.") - - parser.add_argument("--states", required=False, default=None, - help="comma separated list of states (see defs.py). 
Only tasks " - "in these states will be returned.") - - parser.add_argument("--nosubmitted", required=False, default=False, - action="store_true", help="Skip all tasks flagged as submitted") - - parser.add_argument("--taskfile", required=False, default=None, - help="write tasks to this file (if not specified, write to STDOUT)") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - args = parser.parse_args(sys.argv[2:]) - - expid = None - if args.expid >= 0: - expid = args.expid - - spec = None - if args.spec >= 0: - spec = args.spec - - states = None - if args.states is not None: - states = args.states.split(",") - - ttypes = None - if args.tasktypes is not None: - ttypes = args.tasktypes.split(",") - - control.tasks( - ttypes, - nightstr=args.nights, - states=states, - expid=expid, - spec=spec, - nosubmitted=args.nosubmitted, - db_postgres_user=args.db_postgres_user, - taskfile=args.taskfile) - - return - - - def getready(self): - parser = argparse.ArgumentParser(description="Update database to " - "for one or more nights to ensure that forward dependencies " - "know that they are ready.", - usage="desi_pipe getready [options] (use --help for details)") - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be examined.") - - args = parser.parse_args(sys.argv[2:]) - - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - control.getready(db, nightstr=args.nights) - - return - - - def check(self): - parser = argparse.ArgumentParser(\ - description="Check the state of pipeline tasks", - usage="desi_pipe check [options] (use --help for details)") - - parser.add_argument("--taskfile", required=False, default=None, - help="read tasks from this file (if not specified, read from " - "STDIN)") - - parser.add_argument("--nodb", required=False, default=False, - action="store_true", help="Do not use the production database.") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - args = parser.parse_args(sys.argv[2:]) - - tasks = pipe.prod.task_read(args.taskfile) - - db = None - if not args.nodb: - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="r", user=args.db_postgres_user) - - states = control.check_tasks(tasks, db=db) - - for tsk in tasks: - print("{} : {}".format(tsk, states[tsk])) - sys.stdout.flush() - - return - - - def sync(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(\ - description="Synchronize DB state based on the filesystem.", - usage="desi_pipe sync [options] (use --help for details)") - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be examined.") - parser.add_argument("--force-spec-done", action="store_true", - help="force setting spectra file to state done if file exists independently of state of parent cframes.") - - args = parser.parse_args(sys.argv[2:]) - - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - control.sync(db, nightstr=args.nights,specdone=args.force_spec_done) - - return - - - def cleanup(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = 
argparse.ArgumentParser(\ - description="Clean up stale task states in the DB", - usage="desi_pipe cleanup [options] (use --help for details)") - - parser.add_argument("--failed", required=False, default=False, - action="store_true", help="Also clear failed states") - - parser.add_argument("--submitted", required=False, default=False, - action="store_true", help="Also clear submitted flag") - - parser.add_argument("--tasktypes", required=False, default=None, - help="comma separated list of task types to clean ({})".format(availtypes)) - - parser.add_argument("--expid", required=False, type=int, default=-1, - help="Only clean tasks for this exposure ID.") - - args = parser.parse_args(sys.argv[2:]) - - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - - ttypes = None - if args.tasktypes is not None: - ttypes = args.tasktypes.split(",") - - expid = None - if args.expid >= 0: - expid = args.expid - - control.cleanup( - db, - ttypes, - failed=args.failed, - submitted=args.submitted, - expid=expid) - - return - - - def _check_nersc_host(self, args): - """Modify the --nersc argument based on the environment. - """ - if args.shell: - # We are forcibly generating shell scripts. - args.nersc = None - else: - if args.nersc is None: - if "NERSC_HOST" in os.environ: - if os.environ["NERSC_HOST"] == "cori": - args.nersc = "cori-haswell" - else: - args.nersc = os.environ["NERSC_HOST"] - return - - - def _parse_run_opts(self, parser): - """Internal function to parse options for running. - - This provides a consistent set of run-time otpions for the - "dryrun", "script", and "run" commands. - - """ - parser.add_argument("--nersc", required=False, default=None, - help="write a script for this NERSC system (cori-haswell " - "| cori-knl). Default uses $NERSC_HOST") - - parser.add_argument("--shell", required=False, default=False, - action="store_true", - help="generate normal bash scripts, even if run on a NERSC system") - - parser.add_argument("--nersc_queue", required=False, default="regular", - help="write a script for this NERSC queue (debug | regular)") - - parser.add_argument("--nersc_maxtime", required=False, type=int, - default=0, help="Then maximum run time (in minutes) for a single " - " job. If the list of tasks cannot be run in this time, multiple " - " job scripts will be written. Default is the maximum time for " - " the specified queue.") - - parser.add_argument("--nersc_maxnodes", required=False, type=int, - default=0, help="The maximum number of nodes to use. Default " - " is the maximum nodes for the specified queue.") - - parser.add_argument("--nersc_shifter", required=False, default=None, - help="The shifter image to use for NERSC jobs") - - parser.add_argument("--mpi_procs", required=False, type=int, default=1, - help="The number of MPI processes to use for non-NERSC shell " - "scripts (default 1)") - - parser.add_argument("--mpi_run", required=False, type=str, - default="", help="The command to launch MPI programs " - "for non-NERSC shell scripts (default do not use MPI)") - - parser.add_argument("--procs_per_node", required=False, type=int, - default=0, help="The number of processes to use per node. If not " - "specified it uses a default value for each machine.") - - parser.add_argument("--outdir", required=False, default=None, - help="Put task scripts and logs in this directory relative to the " - "production 'scripts' directory. 
Default puts task directory " - "in the main scripts directory.") - - parser.add_argument("--debug", required=False, default=False, - action="store_true", help="debugging messages in job logs") - - return parser - - - def dryrun(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(description="Print equivalent " - "command-line jobs that would be run given the tasks and total" - "number of processes", - usage="desi_pipe dryrun [options] (use --help for details)") - - parser.add_argument("--taskfile", required=False, default=None, - help="read tasks from this file (if not specified, read from " - "STDIN)") - - parser = self._parse_run_opts(parser) - - parser.add_argument("--nodb", required=False, default=False, - action="store_true", help="Do not use the production database.") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - parser.add_argument("--force", required=False, default=False, - action="store_true", help="print commands for all tasks, not" - " only the ready ones") - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - tasks = pipe.prod.task_read(args.taskfile) - - control.dryrun( - tasks, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - nodb=args.nodb, - db_postgres_user=args.db_postgres_user, - force=args.force) - - return - - - def script(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(description="Create batch script(s) " - "for the list of tasks. If the --nersc option is not given, " - "create shell script(s) that optionally uses mpirun. Prints" - " the list of generated scripts to STDOUT.", - usage="desi_pipe script [options] (use --help for details)") - - parser.add_argument("--taskfile", required=False, default=None, - help="read tasks from this file (if not specified, read from " - "STDIN)") - - parser = self._parse_run_opts(parser) - - parser.add_argument("--nodb", required=False, default=False, - action="store_true", help="Do not use the production database.") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - scripts = control.script( - args.taskfile, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - nodb=args.nodb, - out=args.outdir, - db_postgres_user=args.db_postgres_user) - - if len(scripts) > 0: - print(",".join(scripts)) - - return - - - def run(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(description="Create and run batch " - "script(s) for the list of tasks. If the --nersc option is not " - "given, create shell script(s) that optionally uses mpirun.", - usage="desi_pipe run [options] (use --help for details)") - - parser.add_argument("--taskfile", required=False, default=None, - help="Read tasks from this file (if not specified, read from " - "STDIN). 
Tasks of all types will be packed into a single job!") - - parser.add_argument("--nosubmitted", required=False, default=False, - action="store_true", help="Skip all tasks flagged as submitted") - - parser.add_argument("--depjobs", required=False, default=None, - help="comma separated list of slurm job IDs to specify as " - "dependencies of this current job.") - - parser = self._parse_run_opts(parser) - - parser.add_argument("--nodb", required=False, default=False, - action="store_true", help="Do not use the production database.") - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - deps = None - if args.depjobs is not None: - deps = args.depjobs.split(",") - - jobids = control.run( - args.taskfile, - nosubmitted=args.nosubmitted, - depjobs=deps, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - nodb=args.nodb, - out=args.outdir, - debug=args.debug) - - if len(jobids) > 0: - print(",".join(jobids)) - - return - - - def chain(self): - parser = argparse.ArgumentParser(description="Create a chain of jobs" - " using all ready tasks for each specified step. The order of" - " the pipeline steps is fixed, regardless of the order specified" - " by the --tasktypes option.", - usage="desi_pipe chain [options] (use --help for details)") - - parser.add_argument("--tasktypes", required=False, default=",".join(pipe.tasks.base.default_task_chain), - help="comma separated list of slurm job IDs to specify as " - "dependencies of this current job.") - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be generated.") - - parser.add_argument("--expid", required=False, type=int, default=-1, - help="Only select tasks for a single exposure ID.") - - parser.add_argument("--spec", required=False, type=int, default=-1, - help="Only select tasks for a single spectrograph.") - - parser.add_argument("--states", required=False, default=None, - help="comma separated list of states (see defs.py). 
Only tasks " - "in these states will be scheduled.") - - parser.add_argument("--pack", required=False, default=False, - action="store_true", help="Pack the chain of pipeline steps " - "into a single job script.") - - parser.add_argument("--nosubmitted", required=False, default=False, - action="store_true", help="Skip all tasks flagged as submitted") - - parser.add_argument("--depjobs", required=False, default=None, - help="comma separated list of slurm job IDs to specify as " - "dependencies of this current job.") - - parser.add_argument("--dryrun", action="store_true", - help="do not submit the jobs.") - - parser = self._parse_run_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - self._check_nersc_host(args) - - expid = None - if args.expid >= 0: - expid = args.expid - - spec = None - if args.spec >= 0: - spec = args.spec - - states = None - if args.states is not None: - states = args.states.split(",") - - deps = None - if args.depjobs is not None: - deps = args.depjobs.split(",") - - jobids = control.chain( - args.tasktypes.split(","), - nightstr=args.nights, - states=states, - expid=expid, - spec=spec, - pack=args.pack, - nosubmitted=args.nosubmitted, - depjobs=deps, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=args.outdir, - debug=args.debug, - dryrun=args.dryrun) - - if jobids is not None and len(jobids) > 0: - print(",".join(jobids)) - - return - - - def go(self): - parser = argparse.ArgumentParser(description="Run a full production " - "from start to finish. This will pack steps into 3 jobs per night" - " and then run redshift fitting after all nights are done. Note " - "that if you are running multiple nights you should use the " - "regular queue.", - usage="desi_pipe go [options] (use --help for details)") - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be generated.") - - parser.add_argument("--states", required=False, default=None, - help="comma separated list of states. 
This argument is " - "passed to chain (see desi_pipe chain --help for more info).") - parser.add_argument("--resume", action = 'store_true', - help="same as --states waiting,ready") - - parser.add_argument("--dryrun", action="store_true", - help="do not submit the jobs.") - - parser = self._parse_run_opts(parser) - - args = parser.parse_args(sys.argv[2:]) - - if args.resume : - if args.states is not None : - print("Ambiguous arguments: cannot specify --states along with --resume option which would overwrite the list of states.") - return - else : - args.states="waiting,ready" - - self._check_nersc_host(args) - - allnights = io.get_nights(strip_path=True) - nights = pipe.prod.select_nights(allnights, args.nights) - - log = get_logger() - - blocks = [ - ["preproc", "psf", "psfnight"], - ["traceshift", "extract"], - ["fiberflat", "fiberflatnight", "sky", "starfit", "fluxcalib", - "cframe"], - ] - - nightlast = list() - - states = args.states - if states is not None : - states = states.split(",") - - for nt in nights: - previous = None - log.info("Submitting processing chains for night {}".format(nt)) - for blk in blocks: - jobids = control.chain( - blk, - nightstr="{}".format(nt), - pack=True, - depjobs=previous, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=args.outdir, - states=states, - debug=args.debug, - dryrun=args.dryrun) - if jobids is not None and len(jobids)>0 : - previous = [ jobids[-1] ] - if previous is not None and len(previous)>0 : - nightlast.append(previous[-1]) - - # Submit spectal grouping - jobids = control.chain( - ["spectra"], - pack=True, - depjobs=nightlast, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=args.outdir, - states=states, - debug=args.debug, - dryrun=args.dryrun) - - previous = None - if jobids is not None and len(jobids)>0 : - previous = [ jobids[-1] ] - - # Submit redshifts (and coadds) - jobids = control.chain( - ["redshift"], - pack=True, - depjobs=previous, - nersc=args.nersc, - nersc_queue=args.nersc_queue, - nersc_maxtime=args.nersc_maxtime, - nersc_maxnodes=args.nersc_maxnodes, - nersc_shifter=args.nersc_shifter, - mpi_procs=args.mpi_procs, - mpi_run=args.mpi_run, - procs_per_node=args.procs_per_node, - out=args.outdir, - states=states, - debug=args.debug, - dryrun=args.dryrun) - - return - - - def status(self): - availtypes = ",".join(pipe.tasks.base.default_task_chain) - - parser = argparse.ArgumentParser(\ - description="Explore status of pipeline tasks", - usage="desi_pipe status [options] (use --help for details)") - - parser.add_argument("--task", required=False, default=None, - help="get log information about this specific task") - - parser.add_argument("--tasktypes", required=False, default=None, - help="comma separated list of task types ({})".format(availtypes)) - - parser.add_argument("--nights", required=False, default=None, - help="comma separated (YYYYMMDD) or regex pattern- only nights " - "matching these patterns will be examined.") - - parser.add_argument("--expid", required=False, type=int, default=None, - help="Only select tasks for a single exposure ID.") - - parser.add_argument("--spec", required=False, type=int, 
default=None, - help="Only select tasks for a single spectrograph.") - - parser.add_argument("--states", required=False, default=None, - help="comma separated list of states (see defs.py). Only tasks " - "in these states will be returned.") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - args = parser.parse_args(sys.argv[2:]) - - ttypes = None - if args.tasktypes is not None: - ttypes = args.tasktypes.split(",") - - states = None - if args.states is not None: - states = args.states.split(",") - - control.status( - task=args.task, tasktypes=ttypes, nightstr=args.nights, - states=states, expid=args.expid, spec=args.spec, - db_postgres_user=args.db_postgres_user - ) - - return - - - def top(self): - parser = argparse.ArgumentParser(\ - description="Live overview of the production state", - usage="desi_pipe top [options] (use --help for details)") - - parser.add_argument("--refresh", required=False, type=int, default=10, - help="The number of seconds between DB queries") - - parser.add_argument("--db-postgres-user", type=str, required=False, - default="desidev_ro", help="If using postgres, connect as this " - "user for read-only access") - - parser.add_argument("--once", required=False, action="store_true", - default=False, help="Print info once without clearing the terminal") - - args = parser.parse_args(sys.argv[2:]) - - import signal - import time - import numpy as np - - def signal_handler(signal, frame): - sys.exit(0) - signal.signal(signal.SIGINT, signal_handler) - - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="r", user=args.db_postgres_user) - - tasktypes = pipe.tasks.base.default_task_chain - - header_state = "" - for s in pipe.task_states: - header_state = "{} {:8s}|".format(header_state, s) - header_state = "{} {:8s}|".format(header_state, "submit") - - sep = "----------------+---------+---------+---------+---------+---------+---------+" - - header = "{}\n{:16s}|{}\n{}".format(sep, " Task Type", - header_state, sep) - - def print_status(clear=False): - taskstates = {} - tasksub = {} - with db.cursor() as cur: - for t in tasktypes: - taskstates[t] = {} - cmd = "select state from {}".format(t) - cur.execute(cmd) - st = np.array([ int(x[0]) for x in cur.fetchall() ]) - for s in pipe.task_states: - taskstates[t][s] = \ - np.sum(st == pipe.task_state_to_int[s]) - if (t != "spectra") and (t != "redshift"): - cmd = "select submitted from {}".format(t) - cur.execute(cmd) - isub = [ int(x[0]) for x in cur.fetchall() ] - tasksub[t] = np.sum(isub).astype(int) - if clear: - print("\033c") - print(header) - for t in tasktypes: - line = "{:16s}|".format(t) - for s in pipe.task_states: - line = "{}{:9d}|".format(line, taskstates[t][s]) - if t in tasksub: - line = "{}{:9d}|".format(line, tasksub[t]) - else: - line = "{}{:9s}|".format(line, " NA") - print(line) - print(sep) - if clear: - print(" (Use Ctrl-C to Quit) ") - sys.stdout.flush() - - if args.once: - print_status(clear=False) - else: - while True: - print_status(clear=True) - time.sleep(args.refresh) - - return - - -def main(): - p = PipeUI() diff --git a/deprecated/py/desispec/scripts/pipe_exec.py b/deprecated/py/desispec/scripts/pipe_exec.py deleted file mode 100644 index e5c8a729a..000000000 --- a/deprecated/py/desispec/scripts/pipe_exec.py +++ /dev/null @@ -1,178 +0,0 @@ -# -# See top-level LICENSE.rst file for Copyright information -# -# -*- coding: utf-8 -*- -""" -desispec.scripts.pipe_exec 
-========================== - -Run one or more pipeline tasks. -""" - -from __future__ import absolute_import, division, print_function - -import sys -import os -import time -import datetime -import numpy as np -import argparse -import re -import warnings - -import desispec.io as io - -from desiutil.log import get_logger - -import desispec.pipeline as pipe - - -def parse(options=None): - parser = argparse.ArgumentParser(description="Run pipeline tasks of a " - "single type") - parser.add_argument("--tasktype", required=False, default=None, - help="The type of the input tasks.") - parser.add_argument("--force", required=False, default=False, - action="store_true", help="Run tasks regardless of DB or file state.") - parser.add_argument("--nodb", required=False, default=False, - action="store_true", help="Do not use the production database.") - parser.add_argument("--taskfile", required=False, default=None, - help="Use a file containing the list of tasks. If not specified, " - "use --task or read list of tasks from STDIN") - parser.add_argument("--task", required=False, default=None, - help="Single task to run") - - args = None - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - -def main(args, comm=None): - t1 = datetime.datetime.now() - - log = get_logger() - - rank = 0 - nproc = 1 - if comm is not None: - rank = comm.rank - nproc = comm.size - - # Check start up time. - - if rank == 0: - if "STARTTIME" in os.environ: - try: - t0 = datetime.datetime.strptime(os.getenv("STARTTIME"), "%Y%m%d-%H%M%S") - dt = t1 - t0 - minutes, seconds = dt.seconds//60, dt.seconds%60 - log.info("Python startup time: {} min {} sec".format(minutes, seconds)) - except ValueError: - log.error("unable to parse $STARTTIME={}".format(os.getenv("STARTTIME"))) - else: - log.info("Python startup time unknown since $STARTTIME not set") - sys.stdout.flush() - - # raw and production locations - - rawdir = os.path.abspath(io.rawdata_root()) - proddir = os.path.abspath(io.specprod_root()) - - if rank == 0: - log.info("Starting at {}".format(time.asctime())) - log.info(" Using raw dir {}".format(rawdir)) - log.info(" Using spectro production dir {}".format(proddir)) - sys.stdout.flush() - - # Get task list from disk or from STDIN - - tasklist = None - if args.taskfile is not None: - # One process reads the file and broadcasts - if rank == 0: - tasklist = pipe.prod.task_read(args.taskfile) - if comm is not None: - tasklist = comm.bcast(tasklist, root=0) - elif args.task is not None: - tasklist = [args.task,] - else: - # Every process has the same STDIN contents. - tasklist = list() - for line in sys.stdin: - tasklist.append(line.rstrip()) - - # Do we actually have any tasks? - if len(tasklist) == 0: - # warnings.warn("Task list is empty", RuntimeWarning) - log.error("Task list is empty") - sys.exit(1) - - # Derive tasktype from tasklist if needed; otherwise filter by tasktype - tasktypes = [tmp.split('_')[0] for tmp in tasklist] - if args.tasktype is None: - if len(set(tasktypes)) == 1: - args.tasktype = tasktypes[0] - else: - log.error("Multiple task types found in input list: {}".format( - set(tasktypes))) - sys.exit(1) - - elif len(set(tasktypes)) > 0: - tasklist = [t for t in tasklist if t.startswith(args.tasktype+'_')] - - # run it! 
- - (db, opts) = pipe.load_prod("w") - - ntask = len(tasklist) - ready = None - done = None - failed = None - - if args.nodb: - ready, done, failed = pipe.run_task_list(args.tasktype, tasklist, opts, - comm=comm, db=None, force=args.force) - else: - ready, done, failed = pipe.run_task_list(args.tasktype, tasklist, opts, - comm=comm, db=db, force=args.force) - - t2 = datetime.datetime.now() - - if rank == 0: - log.info(" {} tasks already done, {} tasks were ready, and {} failed".format(done, ready, failed)) - dt = t2 - t1 - minutes, seconds = dt.seconds//60, dt.seconds%60 - log.info("Run time: {} min {} sec".format(minutes, seconds)) - sys.stdout.flush() - - if comm is not None: - comm.barrier() - - # Did we have any ready tasks that were not already done? - # Note: if there were no ready tasks, but some were already - # done, then we want to exit with a "0" error code. This will - # allow the calling script to continue with other pipeline steps - # and / or allow other dependent jobs run. - if done == 0: - # nothing is done - if ready == 0: - if rank == 0: - warnings.warn("No tasks were ready or done", RuntimeWarning) - sys.exit(1) - if (failed == ready) and (failed > 1) : - # all tasks failed (and there are more than one) - if rank == 0: - warnings.warn("All tasks that were run failed", RuntimeWarning) - sys.exit(1) - else: - # At least some tasks were done- we return zero so that future - # jobs can run. - if (ready > 0) and (failed == ready): - if rank == 0: - warnings.warn("All tasks that were run failed", RuntimeWarning) - sys.exit(1) - - return diff --git a/deprecated/py/desispec/scripts/qa_exposure.py b/deprecated/py/desispec/scripts/qa_exposure.py deleted file mode 100644 index 867b20ded..000000000 --- a/deprecated/py/desispec/scripts/qa_exposure.py +++ /dev/null @@ -1,90 +0,0 @@ -""" -desispec.scripts.qa_exposure -============================ - -Script for generating QA for a full exposure -""" -from __future__ import absolute_import, division - -from desiutil.log import get_logger -import argparse - -from desispec.qa import __offline_qa_version__ - -def parse(options=None): - parser = argparse.ArgumentParser(description="Generate Exposure Level QA [v{:s}]".format(__offline_qa_version__)) - parser.add_argument('--expid', type=int, required=True, help='Exposure ID') - parser.add_argument('--qatype', type=str, required=False, - help="Type of QA to generate [fiberflat, s2n]") - parser.add_argument('--channels', type=str, help="List of channels to include. Default = b,r,z]") - parser.add_argument('--specprod_dir', type = str, default=None, metavar='PATH', - help='Override default path to processed data.') - parser.add_argument('--qaprod_dir', type=str, default=None, metavar='PATH', - help='Override default path to QA data.') - parser.add_argument('--rebuild', default=False, action="store_true", - help = 'Regenerate the QA files for this exposure?') - parser.add_argument('--qamulti_root', type=str, default=None, - help='Root name for a set of slurped QA files (e.g. mini_qa). Uses $SPECPROD/QA for path') - parser.add_argument('--slurp', type=str, default=None, - help='Root name for slurp QA file to add to (e.g. mini_qa). File must already exist. 
Uses $SPECPROD/QA for path') - - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - -def main(args) : - - from desispec.io import meta - from desispec.qa.qa_plots import exposure_fiberflat, exposure_s2n - from desispec.qa.qa_exposure import QA_Exposure - from desispec.io.meta import find_exposure_night - - log=get_logger() - - log.info("starting") - - # Setup - if args.specprod_dir is None: - specprod_dir = meta.specprod_root() - else: - specprod_dir = args.specprod_dir - if args.qaprod_dir is None: - qaprod_dir = meta.qaprod_root(specprod_dir=specprod_dir) - else: - qaprod_dir = args.qaprod_dir - if args.channels is None: - channels = ['b','r','z'] - else: - channels = [iarg for iarg in args.channels.split(',')] - - # Find night - night = find_exposure_night(args.expid, specprod_dir=specprod_dir) - - # Instantiate - qa_exp = QA_Exposure(args.expid, night, specprod_dir=specprod_dir, - qaprod_dir=qaprod_dir, - no_load=args.rebuild, multi_root=args.qamulti_root) - # Rebuild? - if args.rebuild: - qa_exp.build_qa_data(rebuild=True) - - # Fiber QA - if args.qatype == 'fiberflat': - for channel in channels: - exposure_fiberflat(channel, args.expid, 'meanflux') - - # S/N - if args.qatype == 's2n': - # S2N table - qa_exp.s2n_table() - # Figure time - exposure_s2n(qa_exp, 'resid', specprod_dir=specprod_dir) - - # Slurp into a file? - if args.slurp is not None: - qa_exp.slurp_into_file(args.slurp) - - diff --git a/deprecated/py/desispec/scripts/qa_frame.py b/deprecated/py/desispec/scripts/qa_frame.py deleted file mode 100644 index c7f225fa4..000000000 --- a/deprecated/py/desispec/scripts/qa_frame.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -desispec.scripts.qa_frame -========================= - -Script for generating QA for a single Frame -""" -from __future__ import absolute_import, division - -from desiutil.log import get_logger -import argparse - -from desispec.qa import __offline_qa_version__ - -def parse(options=None): - parser = argparse.ArgumentParser(description="Generate Frame Level QA [v{:s}]".format(__offline_qa_version__)) - parser.add_argument('--frame_file', type = str, required=True, - help='Frame filename (e.g. frame-b1-00000010.fits). Full path is not required nor desired. ') - parser.add_argument('--specprod_dir', type = str, default = None, metavar = 'PATH', - help = 'Override default path ($DESI_SPECTRO_REDUX/$SPECPROD) to processed data.') - parser.add_argument('--make_plots', default=False, action="store_true", - help = 'Generate QA figs too?') - parser.add_argument('--output_dir', type = str, default = None, metavar = 'PATH', - help = 'Override default path for output files') - - - args = None - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - -def main(args) : - - from desispec.io import meta - from desispec.qa.qa_frame import qaframe_from_frame - log=get_logger() - - log.info("starting") - if args.specprod_dir is None: - specprod_dir = meta.specprod_root() - qaprod_dir = None - else: - specprod_dir = args.specprod_dir - qaprod_dir = meta.qaprod_root(specprod_dir=specprod_dir) - - # Generate qaframe (and figures?) 
- qaframe = qaframe_from_frame(args.frame_file, specprod_dir=specprod_dir, make_plots=args.make_plots, - qaprod_dir=qaprod_dir, output_dir=args.output_dir) - - diff --git a/deprecated/py/desispec/scripts/qa_night.py b/deprecated/py/desispec/scripts/qa_night.py deleted file mode 100644 index fd37b9860..000000000 --- a/deprecated/py/desispec/scripts/qa_night.py +++ /dev/null @@ -1,116 +0,0 @@ -""" -desispec.scripts.qa_night -========================= - -Script for analyzing QA from a Night -""" -from __future__ import absolute_import, division - -import argparse - -from desispec.qa import __offline_qa_version__ - -def parse(options=None): - parser = argparse.ArgumentParser(description="Analyze Night Level QA [v{:s}]".format(__offline_qa_version__)) - - #parser.add_argument('--channel_hist', type=str, default=None, - # help='Generate channel histogram(s)') - parser.add_argument('--expid_series', default=False, action='store_true', - help='Generate exposure series plots.') - parser.add_argument('--bright_dark', type=int, default=0, - help='Restrict to bright/dark (flag: 0=all; 1=bright; 2=dark; only used in time_series)') - parser.add_argument('--qaprod_dir', type=str, default=None, help='Path to where QA is generated. Default is qaprod_dir') - parser.add_argument('--specprod_dir', type=str, default=None, help='Path to spectro production folder. Default is specprod_dir') - parser.add_argument('--night', type=str, help='Night; required') - #parser.add_argument('--S2N_plot', default=False, action='store_true', - # help = 'Generate a S/N plot for the production (vs. xaxis)') - #parser.add_argument('--ZP_plot', default=False, action='store_true', - # help = 'Generate a ZP plot for the production (vs. xaxis)') - #parser.add_argument('--xaxis', type=str, default='MJD', help='Specify x-axis for S/N and ZP plots') - - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - -def main(args) : - - from desispec.qa import QA_Night - from desiutil.log import get_logger - from desispec.io import meta - from desispec.qa import qa_plots as dqqp - - log=get_logger() - - log.info("starting") - # Initialize - if args.specprod_dir is None: - specprod_dir = meta.specprod_root() - else: - specprod_dir = args.specprod_dir - if args.qaprod_dir is None: - qaprod_dir = meta.qaprod_root(specprod_dir=specprod_dir) - else: - qaprod_dir = args.qaprod_dir - - qa_prod = QA_Night(args.night, specprod_dir=specprod_dir, qaprod_dir=qaprod_dir) - - # Exposure plots - if args.expid_series: - # QATYPE-METRIC - qa_prod.load_data() - # SKYSUB RESID - qatype, metric = 'SKYSUB', 'RESID' #args.expid_series.split('-') - outfile = qaprod_dir+'/QA_{:s}_expid_{:s}-{:s}.png'.format(args.night, qatype, metric) - dqqp.prod_time_series(qa_prod, qatype, metric, outfile=outfile, - bright_dark=args.bright_dark, exposures=True, - night=args.night, horiz_line=0.) - # FLUXCALIB ZP - qatype, metric = 'FLUXCALIB', 'ZP' #args.expid_series.split('-') - outfile = qaprod_dir+'/QA_{:s}_expid_{:s}-{:s}.png'.format(args.night, qatype, metric) - dqqp.prod_time_series(qa_prod, qatype, metric, outfile=outfile, - bright_dark=args.bright_dark, exposures=True, - night=args.night) - - ''' The stuff down here does not work, or has not been tested on Night QA - # Channel histograms - if args.channel_hist is not None: - # imports - from matplotlib.backends.backend_pdf import PdfPages - # - qa_prod.load_data() - outfile = qa_prod.prod_name+'_chist.pdf' - pp = PdfPages(outfile) - # Default? 
- if args.channel_hist == 'default': - dqqp.prod_channel_hist(qa_prod, 'FIBERFLAT', 'MAX_RMS', pp=pp, close=False) - dqqp.prod_channel_hist(qa_prod, 'SKYSUB', 'RESID', xlim=(-15,15), pp=pp, close=False) - dqqp.prod_channel_hist(qa_prod, 'FLUXCALIB', 'MAX_ZP_OFF', pp=pp, close=False) - # Finish - print("Writing {:s}".format(outfile)) - pp.close() - # plot - if args.S2N_plot: - # Load up - qa_prod.load_data() - qa_prod.load_exposure_s2n() - # Plot - outfile= qaprod_dir+'/QA_S2N_{:s}.png'.format(args.xaxis) - dqqp.prod_avg_s2n(qa_prod, optypes=['ELG', 'LRG', 'QSO'], xaxis=args.xaxis, outfile=outfile) - - # ZP plot - if args.ZP_plot: - # Load up - qa_prod.load_data() - # Plot - outfile= qaprod_dir+'/QA_ZP_{:s}.png'.format(args.xaxis) - dqqp.prod_ZP(qa_prod, xaxis=args.xaxis, outfile=outfile) - - # HTML - if args.html: - html.calib(qaprod_dir=qaprod_dir, specprod_dir=specprod_dir) - html.make_exposures(qaprod_dir=qaprod_dir) - html.toplevel(qaprod_dir=qaprod_dir) - ''' diff --git a/deprecated/py/desispec/scripts/qa_prod.py b/deprecated/py/desispec/scripts/qa_prod.py deleted file mode 100644 index a40ad0f7c..000000000 --- a/deprecated/py/desispec/scripts/qa_prod.py +++ /dev/null @@ -1,142 +0,0 @@ -""" -desispec.scripts.qa_prod -======================== - -Script for generating QA from a Production run -""" -from __future__ import absolute_import, division - -import argparse -import numpy as np - -from desispec.qa import __offline_qa_version__ - -def parse(options=None): - parser = argparse.ArgumentParser(description="Generate/Analyze Production Level QA [v{:s}]".format(__offline_qa_version__)) - - parser.add_argument('--make_frameqa', type = int, default = 0, - help = 'Bitwise flag to control remaking the QA files (1) and figures (2) for each frame in the production') - parser.add_argument('--slurp', default = False, action='store_true', - help = 'slurp production QA files into one per night?') - parser.add_argument('--remove', default = False, action='store_true', - help = 'remove frame QA files?') - parser.add_argument('--clobber', default=False, action='store_true', - help='clobber existing QA files?') - parser.add_argument('--channel_hist', type=str, default=None, - help='Generate channel histogram(s)') - parser.add_argument('--time_series', type=str, default=None, - help='Generate time series plot. Input is QATYPE-METRIC, e.g. SKYSUB-RESID') - parser.add_argument('--bright_dark', type=int, default=0, - help='Restrict to bright/dark (flag: 0=all; 1=bright; 2=dark; only used in time_series)') - parser.add_argument('--html', default = False, action='store_true', - help = 'Generate HTML files?') - parser.add_argument('--qaprod_dir', type=str, default=None, help='Path to where QA is generated. Default is qaprod_dir') - parser.add_argument('--specprod_dir', type=str, default=None, help='Path to spectro production folder. Default is specprod_dir') - parser.add_argument('--night', type=str, default=None, help='Only process this night') - parser.add_argument('--S2N_plot', default=False, action='store_true', - help = 'Generate a S/N plot for the production (vs. xaxis)') - parser.add_argument('--ZP_plot', default=False, action='store_true', - help = 'Generate a ZP plot for the production (vs. 
xaxis)') - parser.add_argument('--xaxis', type=str, default='MJD', help='Specify x-axis for S/N and ZP plots') - - args = None - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - -def main(args) : - - from desispec.qa import QA_Prod - from desispec.qa import html - from desiutil.log import get_logger - from desispec.io import meta - from desispec.qa import qa_plots as dqqp - - log=get_logger() - - log.info("starting") - # Initialize - if args.specprod_dir is None: - specprod_dir = meta.specprod_root() - else: - specprod_dir = args.specprod_dir - if args.qaprod_dir is None: - qaprod_dir = meta.qaprod_root(specprod_dir=specprod_dir) - else: - qaprod_dir = args.qaprod_dir - - qa_prod = QA_Prod(specprod_dir, qaprod_dir=qaprod_dir) - - # Restrict to a nights - restrict_nights = [args.night] if args.night is not None else None - - # Remake Frame QA? - if args.make_frameqa > 0: - log.info("(re)generating QA related to frames") - if (args.make_frameqa % 4) >= 2: - make_frame_plots = True - else: - make_frame_plots = False - # Run - if (args.make_frameqa & 2**0) or (args.make_frameqa & 2**1): - # Allow for restricted nights - qa_prod.make_frameqa(make_plots=make_frame_plots, clobber=args.clobber, - restrict_nights=restrict_nights) - - # Slurp and write? - if args.slurp: - qa_prod.qaexp_outroot = qaprod_dir - qa_prod.slurp_nights(make=(args.make_frameqa > 0), remove=args.remove, write_nights=True, - restrict_nights=restrict_nights) - - # Channel histograms - if args.channel_hist is not None: - # imports - from matplotlib.backends.backend_pdf import PdfPages - # - qa_prod.load_data() - outfile = qa_prod.prod_name+'_chist.pdf' - pp = PdfPages(outfile) - # Default? - if args.channel_hist == 'default': - dqqp.prod_channel_hist(qa_prod, 'FIBERFLAT', 'MAX_RMS', pp=pp, close=False) - dqqp.prod_channel_hist(qa_prod, 'SKYSUB', 'MED_RESID', xlim=(-15,15), pp=pp, close=False) - dqqp.prod_channel_hist(qa_prod, 'FLUXCALIB', 'MAX_ZP_OFF', pp=pp, close=False) - # Finish - print("Writing {:s}".format(outfile)) - pp.close() - - # Time plots - if args.time_series is not None: - # QATYPE-METRIC - qa_prod.load_data() - # Run - qatype, metric = args.time_series.split('-') - outfile= qaprod_dir+'/QA_time_{:s}.png'.format(args.time_series) - dqqp.prod_time_series(qa_prod, qatype, metric, outfile=outfile, bright_dark=args.bright_dark) - - # plot - if args.S2N_plot: - # Load up - qa_prod.load_data() - qa_prod.load_exposure_s2n() - # Plot - outfile= qaprod_dir+'/QA_S2N_{:s}.png'.format(args.xaxis) - dqqp.prod_avg_s2n(qa_prod, optypes=['ELG', 'LRG', 'QSO'], xaxis=args.xaxis, outfile=outfile) - - # ZP plot - if args.ZP_plot: - # Load up - qa_prod.load_data() - # Plot - outfile= qaprod_dir+'/QA_ZP_{:s}.png'.format(args.xaxis) - dqqp.prod_ZP(qa_prod, xaxis=args.xaxis, outfile=outfile) - - # HTML - if args.html: - html.calib(qaprod_dir=qaprod_dir, specprod_dir=specprod_dir) - html.make_exposures(qaprod_dir=qaprod_dir) - html.toplevel(qaprod_dir=qaprod_dir) diff --git a/deprecated/py/desispec/scripts/quicklook.py b/deprecated/py/desispec/scripts/quicklook.py deleted file mode 100644 index 711f17d0e..000000000 --- a/deprecated/py/desispec/scripts/quicklook.py +++ /dev/null @@ -1,146 +0,0 @@ -""" -desispec.scripts.quicklook -========================== - -Command line wrapper for running a QL pipeline - -QuickLook team @Southern Methodist University (SMU) -First version Spring 2016 -Latest revision July 2018 - -Running QuickLook:: - - desi_quicklook -i qlconfig_science.yaml 
-n 20191001 -c r0 -e 3577 - -This requires having necessary input files and setting the following environment variables:: - - QL_SPEC_DATA: directory containing raw/fibermap files (full path: $QL_SPEC_DATA/night/expid) - QL_SPEC_REDUX: directory for QL output (full path: $QL_SPEC_REDUX/exposures/night/expid) - DESI_CALIBRATION_DATA: directory containing calibration files - -Necessary Quicklook command line arguments:: - - -i,--config_file : path to QL configuration file - -n,--night : night to be processed - -c,--camera : camera to be processed - -e,--expid : exposure ID to be processed - -Optional QuickLook arguments:: - - --rawdata_dir : directory containing raw/fibermap files (overrides $QL_SPEC_DATA) - --specprod_dir : directory for QL output (overrides $QL_SPEC_REDUX) - -Plotting options:: - - -p (including path to plotting configuration file) : generate configured plots - -p (only using -p with no configuration file) : generate QL hardcoded plots -""" - -from __future__ import absolute_import, division, print_function - -from desispec.quicklook import quicklook,qllogger,qlconfig -from desispec.io.meta import findfile -import desispec.image as image -import desispec.frame as frame -import desispec.io.frame as frIO -import desispec.io.image as imIO -from desispec.qproc.qframe import QFrame -from desispec.qproc.io import write_qframe - - -import os,sys -import yaml -import json -import argparse - -def quietDesiLogger(loglvl=20): - from desiutil.log import get_logger - get_logger(level=loglvl) - -def parse(): - """ - Should have either a pre existing config file, or need to generate one using config module - """ - parser=argparse.ArgumentParser(description="Run QL on DESI data") - parser.add_argument("-i", "--config_file", type=str, required=False,help="yaml file containing config dictionary",dest="config") - parser.add_argument("-n","--night", type=str, required=False, help="night for the data") - parser.add_argument("-c", "--camera", type=str, required=False, help= "camera for the raw data") - parser.add_argument("-e","--expid", type=int, required=False, help="exposure id") - parser.add_argument("--psfid", type=int, required=False, help="psf id") - parser.add_argument("--flatid", type=int, required=False, help="flat id") - parser.add_argument("--templateid", type=int, required=False, help="template id") - parser.add_argument("--templatenight", type=int, required=False, help="template night") - parser.add_argument("--rawdata_dir", type=str, required=False, help="rawdata directory. overrides $QL_SPEC_DATA in config") - parser.add_argument("--specprod_dir",type=str, required=False, help="specprod directory, overrides $QL_SPEC_REDUX in config") - parser.add_argument("--singleQA",type=str,required=False,help="choose one QA to run",default=None,dest="singqa") - parser.add_argument("--loglvl",default=20,type=int,help="log level for quicklook (0=verbose, 50=Critical)") - parser.add_argument("-p",dest='qlplots',nargs='?',default='noplots',help="generate QL static plots") - parser.add_argument("--resolution",action='store_true', help="store full resolution information") - args=parser.parse_args() - return args - -def ql_main(args=None): - - from desispec.util import set_backend - _matplotlib_backend = None - set_backend() - from desispec.quicklook import quicklook,qllogger,qlconfig - - if args is None: - args = parse() - - qlog=qllogger.QLLogger(name="QuickLook",loglevel=args.loglvl) - log=qlog.getlog() - - # quiet down DESI logs. 
We don't want DESI_LOGGER to print messages unless they are important - # initialize singleton with WARNING level - quietDesiLogger(args.loglvl+10) - - if args.config is not None: - #RS: have command line arguments for finding files via old datamodel - psfid=None - if args.psfid: - psfid=args.psfid - flatid=None - if args.flatid: - flatid=args.flatid - templateid=None - if args.templateid: - templateid=args.templateid - templatenight=None - if args.templatenight: - templatenight=args.templatenight - - if args.rawdata_dir: - rawdata_dir = args.rawdata_dir - else: - if 'QL_SPEC_DATA' not in os.environ: - sys.exit("must set ${} environment variable or provide rawdata_dir".format('QL_SPEC_DATA')) - rawdata_dir=os.getenv('QL_SPEC_DATA') - - if args.specprod_dir: - specprod_dir = args.specprod_dir - else: - if 'QL_SPEC_REDUX' not in os.environ: - sys.exit("must set ${} environment variable or provide specprod_dir".format('QL_SPEC_REDUX')) - specprod_dir=os.getenv('QL_SPEC_REDUX') - - log.debug("Running Quicklook using configuration file {}".format(args.config)) - if os.path.exists(args.config): - if "yaml" in args.config: - config=qlconfig.Config(args.config, args.night,args.camera, args.expid, args.singqa, rawdata_dir=rawdata_dir, specprod_dir=specprod_dir,psfid=psfid,flatid=flatid,templateid=templateid,templatenight=templatenight,qlplots=args.qlplots,store_res=args.resolution) - configdict=config.expand_config() - else: - log.critical("Can't open config file {}".format(args.config)) - sys.exit("Can't open config file") - else: - sys.exit("File does not exist: {}".format(args.config)) - else: - sys.exit("Must provide a valid config file. See desispec/data/quicklook for an example") - - pipeline, convdict = quicklook.setup_pipeline(configdict) - res=quicklook.runpipeline(pipeline,convdict,configdict) - log.info("QuickLook Pipeline completed") - -if __name__=='__main__': - ql_main() diff --git a/deprecated/py/desispec/scripts/skysubresid.py b/deprecated/py/desispec/scripts/skysubresid.py deleted file mode 100644 index 3739382e2..000000000 --- a/deprecated/py/desispec/scripts/skysubresid.py +++ /dev/null @@ -1,167 +0,0 @@ -""" -desispec.scripts.skysubresid -============================ - -Script for generating plots on SkySub residuals -""" -from __future__ import absolute_import, division - -from desiutil.log import get_logger -import argparse -import numpy as np - -from desispec.qa import __offline_qa_version__ - -def parse(options=None): - parser = argparse.ArgumentParser(description="Generate QA on Sky Subtraction residuals [v{:s}]".format(__offline_qa_version__)) - parser.add_argument('--expid', type=int, help='Generate exposure plot on given exposure') - parser.add_argument('--channels', type=str, help='List of channels to include') - parser.add_argument('--prod', default=False, action="store_true", help="Results for full production run") - parser.add_argument('--gauss', default=False, action="store_true", help="Explore Gaussianity for full production run") - parser.add_argument('--nights', type=str, help='List of nights to limit prod plots') - parser.add_argument('--skyline', default=False, action="store_true", help="Skyline residuals?") - parser.add_argument('--qaprod_dir', type=str, default=None, help='Path to where QA figure files are generated. 
Default is qaprod_dir') - - if options is None: - args = parser.parse_args() - else: - args = parser.parse_args(options) - return args - - - -def main(args) : - # imports - import glob - from desispec.io import findfile, makepath - from desispec.io import get_exposures - from desispec.io import get_files, get_nights - from desispec.io import get_reduced_frames - from desispec.io import specprod_root - from desispec.io import qaprod_root - from desispec.qa import utils as qa_utils - import copy - import pdb - - # Init - specprod_dir = specprod_root() - - # Log - log=get_logger() - log.info("starting") - - # Path - if args.qaprod_dir is not None: - qaprod_dir = args.qaprod_dir - else: - qaprod_dir = qaprod_root() - - # Channels - if args.channels is not None: - channels = [iarg for iarg in args.channels.split(',')] - else: - channels = ['b','r','z'] - - # Sky dict - sky_dict = dict(wave=[], skyflux=[], res=[], count=0) - channel_dict = dict(b=copy.deepcopy(sky_dict), - r=copy.deepcopy(sky_dict), - z=copy.deepcopy(sky_dict), - ) - # Nights - if args.nights is not None: - nights = [iarg for iarg in args.nights.split(',')] - else: - nights = None - - # Exposure plot? - if args.expid is not None: - # Nights - if nights is None: - nights = get_nights() - nights.sort() - # Find the exposure - for night in nights: - if args.expid in get_exposures(night, specprod_dir=specprod_dir): - frames_dict = get_files(filetype=str('cframe'), night=night, - expid=args.expid, specprod_dir=specprod_dir) - # Loop on channel - #for channel in ['b','r','z']: - for channel in ['z']: - channel_dict[channel]['cameras'] = [] - for camera, cframe_fil in frames_dict.items(): - if channel in camera: - sky_file = findfile(str('sky'), night=night, camera=camera, - expid=args.expid, specprod_dir=specprod_dir) - wave, flux, res, _ = qa_utils.get_skyres(cframe_fil) - # Append - channel_dict[channel]['wave'].append(wave) - channel_dict[channel]['skyflux'].append(np.log10(np.maximum(flux,1e-1))) - channel_dict[channel]['res'].append(res) - channel_dict[channel]['cameras'].append(camera) - channel_dict[channel]['count'] += 1 - if channel_dict[channel]['count'] > 0: - from desispec.qa.qa_plots import skysub_resid_series # Hidden to help with debugging - skysub_resid_series(channel_dict[channel], 'wave', - outfile=qaprod_dir+'/QA_skyresid_wave_expid_{:d}{:s}.png'.format(args.expid, channel)) - skysub_resid_series(channel_dict[channel], 'flux', - outfile=qaprod_dir+'/QA_skyresid_flux_expid_{:d}{:s}.png'.format(args.expid, channel)) - return - - - # Skyline - if args.skyline: - from desispec.qa.qa_plots import skyline_resid - # Loop on channel - for channel in channels: - cframes = get_reduced_frames(nights=nights, channels=[channel]) - if len(cframes) > 0: - log.info("Loading sky residuals for {:d} cframes".format(len(cframes))) - if len(cframes) == 1: - log.error('len(cframes)==1; starting debugging') - pdb.set_trace() # Need to call differently - else: - sky_wave, sky_flux, sky_res, sky_ivar = qa_utils.get_skyres( - cframes, flatten=False) - # Plot - outfile=args.outdir+'/skyline_{:s}.png'.format(channel) - log.info("Plotting to {:s}".format(outfile)) - skyline_resid(channel, sky_wave, sky_flux, sky_res, sky_ivar, - outfile=outfile) - return - - # Full Prod Plot? 
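The exposure branch above accumulates sky residuals per channel. A standalone sketch of that pattern: copy.deepcopy keeps each channel's lists independent, and flooring the flux at 0.1 avoids taking log10 of non-positive values::

    import copy
    import numpy as np

    sky_dict = dict(wave=[], skyflux=[], res=[], count=0)
    channel_dict = {c: copy.deepcopy(sky_dict) for c in 'brz'}

    flux = np.array([-1.0, 0.0, 50.0])             # hypothetical sky fluxes
    channel_dict['b']['skyflux'].append(np.log10(np.maximum(flux, 1e-1)))
    channel_dict['b']['count'] += 1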
- if args.prod: - from desispec.qa.qa_plots import skysub_resid_dual - # Loop on channel - for channel in channels: - cframes = get_reduced_frames(nights=nights, channels=[channel]) - if len(cframes) > 0: - log.info("Loading sky residuals for {:d} cframes".format(len(cframes))) - sky_wave, sky_flux, sky_res, _ = qa_utils.get_skyres(cframes) - # Plot - outfile=qaprod_dir+'/skyresid_prod_dual_{:s}.png'.format(channel) - makepath(outfile) - log.info("Plotting to {:s}".format(outfile)) - skysub_resid_dual(sky_wave, sky_flux, sky_res, outfile=outfile) - return - - # Test sky noise for Gaussianity - if args.gauss: - from desispec.qa.qa_plots import skysub_gauss - # Loop on channel - for channel in channels: - cframes = get_reduced_frames(nights=nights, channels=[channel]) - if len(cframes) > 0: - # Cut down for debugging - #cframes = [cframes[ii] for ii in range(15)] - # - log.info("Loading sky residuals for {:d} cframes".format(len(cframes))) - sky_wave, sky_flux, sky_res, sky_ivar = qa_utils.get_skyres(cframes) - # Plot - log.info("Plotting..") - outfile=qaprod_dir+'/skyresid_prod_gauss_{:s}.png'.format(channel) - makepath(outfile) - skysub_gauss(sky_wave, sky_flux, sky_res, sky_ivar, - outfile=outfile) - return diff --git a/deprecated/py/desispec/test/integration_test.py b/deprecated/py/desispec/test/integration_test.py deleted file mode 100644 index 41520a377..000000000 --- a/deprecated/py/desispec/test/integration_test.py +++ /dev/null @@ -1,313 +0,0 @@ -""" -Run integration tests from pixsim through redshifts - -python -m desispec.test.integration_test -""" -from __future__ import absolute_import, print_function -import sys -import os -import random -import time -import subprocess as sp -import glob -import shutil - -import numpy as np -from astropy.io import fits - -try: - from scipy import constants - C_LIGHT = constants.c/1000.0 -except TypeError: # This can happen during documentation builds. - C_LIGHT = 299792458.0/1000.0 - -from desispec.util import runcmd -import desispec.pipeline as pipe -import desispec.io as io -import desiutil.log as logging - -#- prevent nose from trying to run this test since it takes too long -__test__ = False - - -def check_env(): - """ - Check required environment variables; exit with an error if any are missing - """ - log = logging.get_logger() - #- template locations - missing_env = False - if 'DESI_BASIS_TEMPLATES' not in os.environ: - log.warning('missing $DESI_BASIS_TEMPLATES needed for simulating spectra') - missing_env = True - elif not os.path.isdir(os.getenv('DESI_BASIS_TEMPLATES')): - log.warning('missing $DESI_BASIS_TEMPLATES directory') - log.warning('e.g. 
see NERSC:/project/projectdirs/desi/spectro/desi_spectro_calib/trunk') - missing_env = True - - for name in ( - 'DESI_SPECTRO_SIM', 'DESI_SPECTRO_REDUX', 'PIXPROD', 'SPECPROD', 'DESIMODEL'): - if name not in os.environ: - log.warning("missing ${0}".format(name)) - missing_env = True - - if missing_env: - log.warning("Why are these needed?") - log.warning(" Simulations written to $DESI_SPECTRO_SIM/$PIXPROD/") - log.warning(" Raw data read from $DESI_SPECTRO_DATA/") - log.warning(" Spectro pipeline output written to $DESI_SPECTRO_REDUX/$SPECPROD/") - log.warning(" Templates are read from $DESI_BASIS_TEMPLATES") - - #- Wait until end to raise exception so that we report everything that - #- is missing before actually failing - if missing_env: - log.critical("missing env vars; exiting without running pipeline") - sys.exit(1) - - #- Override $DESI_SPECTRO_DATA to match $DESI_SPECTRO_SIM/$PIXPROD - os.environ['DESI_SPECTRO_DATA'] = os.path.join(os.getenv('DESI_SPECTRO_SIM'), os.getenv('PIXPROD')) - - -# Simulate raw data - -def sim(night, nspec=5, clobber=False): - """ - Simulate data as part of the integration test. - - Args: - night (str): YEARMMDD - nspec (int, optional): number of spectra to include - clobber (bool, optional): rerun steps even if outputs already exist - - Raises: - RuntimeError if any script fails - """ - log = logging.get_logger() - - # Create input fibermaps, spectra, and pixel-level raw data - - for expid, program in zip([0,1,2], ['flat', 'arc', 'dark']): - cmd = "newexp-random --program {program} --nspec {nspec} --night {night} --expid {expid}".format( - expid=expid, program=program, nspec=nspec, night=night) - fibermap = io.findfile('fibermap', night, expid) - simspec = '{}/simspec-{:08d}.fits'.format(os.path.dirname(fibermap), expid) - inputs = [] - outputs = [fibermap, simspec] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('newexp-random failed for {} exposure {}'.format(program, expid)) - - cmd = "pixsim --nspec {nspec} --night {night} --expid {expid}".format(expid=expid, nspec=nspec, night=night) - inputs = [fibermap, simspec] - outputs = list() - outputs.append(fibermap.replace('fibermap-', 'simpix-')) - outputs.append(io.findfile('raw', night, expid)) - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('pixsim failed for {} exposure {}'.format(program, expid)) - - return - -def run_pipeline_step(tasktype): - """Convenience wrapper to run a pipeline step""" - #- First count the number of tasks that are ready - log = logging.get_logger() - - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="r") - task_count = db.count_task_states(tasktype) - count_string = ', '.join(['{:2d} {}'.format(x[1], x[0]) for x in task_count.items()]) - - nready = task_count['ready'] - if nready > 0: - log.info('{:16s}: {}'.format(tasktype, count_string)) - com = "desi_pipe tasks --tasktypes {tasktype} | grep -v DEBUG | desi_pipe script --shell".format(tasktype=tasktype) - log.info('Running {}'.format(com)) - script = sp.check_output(com, shell=True) - log.info('Running {}'.format(script)) - sp.check_call(script, shell=True) - else: - log.warning('{:16s}: {} -- SKIPPING'.format(tasktype, count_string)) - -def integration_test(night=None, nspec=5, clobber=False): - """Run an integration test from raw data simulations through redshifts - - Args: - night (str, optional): YEARMMDD, defaults to current night - nspec (int, optional): number 
of spectra to include - clobber (bool, optional): rerun steps even if outputs already exist - - Raises: - RuntimeError if any script fails - - """ - - import argparse - parser = argparse.ArgumentParser(usage = "{prog} [options]") - # parser.add_argument("-i", "--input", type=str, help="input data") - # parser.add_argument("-o", "--output", type=str, help="output data") - parser.add_argument("--skip-psf", action="store_true", help="Skip PSF fitting step") - args = parser.parse_args() - - from desiutil.iers import freeze_iers - freeze_iers() - - log = logging.get_logger() - - # YEARMMDD string, rolls over at noon not midnight - if night is None: - night = "20160726" - - # check for required environment variables - check_env() - - # simulate inputs - sim(night, nspec=nspec, clobber=clobber) - - # raw and production locations - - rawdir = os.path.abspath(io.rawdata_root()) - proddir = os.path.abspath(io.specprod_root()) - - # create production - - if clobber and os.path.isdir(proddir): - shutil.rmtree(proddir) - - dbfile = io.get_pipe_database() - if not os.path.exists(dbfile): - com = "desi_pipe create --db-sqlite" - log.info('Running {}'.format(com)) - sp.check_call(com, shell=True) - else: - log.info("Using pre-existing production database {}".format(dbfile)) - - # Modify options file to restrict the spectral range - - optpath = os.path.join(proddir, "run", "options.yaml") - opts = pipe.prod.yaml_read(optpath) - opts['extract']['specmin'] = 0 - opts['extract']['nspec'] = nspec - opts['psf']['specmin'] = 0 - opts['psf']['nspec'] = nspec - opts['traceshift']['nfibers'] = nspec - pipe.prod.yaml_write(optpath, opts) - - if args.skip_psf: - #- Copy desimodel psf into this production instead of fitting psf - import shutil - for channel in ['b', 'r', 'z']: - refpsf = '{}/data/specpsf/psf-{}.fits'.format( - os.getenv('DESIMODEL'), channel) - nightpsf = io.findfile('psfnight', night, camera=channel+'0') - shutil.copy(refpsf, nightpsf) - for expid in [0,1,2]: - exppsf = io.findfile('psf', night, expid, camera=channel+'0') - shutil.copy(refpsf, exppsf) - - #- Resync database to current state - dbpath = io.get_pipe_database() - db = pipe.load_db(dbpath, mode="w") - db.sync(night) - - # Run the pipeline tasks in order - from desispec.pipeline.tasks.base import default_task_chain - for tasktype in default_task_chain: - #- if we skip psf/psfnight/traceshift, update state prior to extractions - if tasktype == 'traceshift' and args.skip_psf: - db.getready() - run_pipeline_step(tasktype) - - # #----- - # #- Did it work? 
- # #- (this combination of fibermap, simspec, and redrock is a pain) - expid = 2 - fmfile = io.findfile('fibermap', night=night, expid=expid) - fibermap = io.read_fibermap(fmfile) - simdir = os.path.dirname(fmfile) - simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid) - siminfo = fits.getdata(simspec, 'TRUTH') - try: - elginfo = fits.getdata(simspec, 'TRUTH_ELG') - except: - elginfo = None - - from desimodel.footprint import radec2pix - nside=64 - pixels = np.unique(radec2pix(nside, fibermap['TARGET_RA'], fibermap['TARGET_DEC'])) - - num_missing = 0 - for pix in pixels: - zfile = io.findfile('redrock', groupname=pix) - if not os.path.exists(zfile): - log.error('Missing {}'.format(zfile)) - num_missing += 1 - - if num_missing > 0: - log.critical('{} redrock files missing'.format(num_missing)) - sys.exit(1) - - print() - print("--------------------------------------------------") - print("Pixel True z -> Class z zwarn") - # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok") - for pix in pixels: - zfile = io.findfile('redrock', groupname=pix) - if not os.path.exists(zfile): - log.error('Missing {}'.format(zfile)) - continue - - zfx = fits.open(zfile, memmap=False) - redrock = zfx['REDSHIFTS'].data - for i in range(len(redrock['Z'])): - objtype = redrock['SPECTYPE'][i] - z, zwarn = redrock['Z'][i], redrock['ZWARN'][i] - - j = np.where(fibermap['TARGETID'] == redrock['TARGETID'][i])[0][0] - truetype = siminfo['OBJTYPE'][j] - oiiflux = 0.0 - if truetype == 'ELG': - k = np.where(elginfo['TARGETID'] == redrock['TARGETID'][i])[0][0] - oiiflux = elginfo['OIIFLUX'][k] - - truez = siminfo['REDSHIFT'][j] - dv = C_LIGHT*(z-truez)/(1+truez) - status = None - if truetype == 'SKY' and zwarn > 0: - status = 'ok' - elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17: - status = 'ok ([OII] flux {:.2g})'.format(oiiflux) - elif zwarn == 0: - if truetype == 'LRG' and objtype == 'GALAXY' and abs(dv) < 150: - status = 'ok' - elif truetype == 'ELG' and objtype == 'GALAXY': - if abs(dv) < 150: - status = 'ok' - elif oiiflux < 8e-17: - status = 'ok ([OII] flux {:.2g})'.format(oiiflux) - else: - status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux) - elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750: - status = 'ok' - elif truetype in ('STD', 'FSTD') and objtype == 'STAR': - status = 'ok' - else: - status = 'OOPS' - else: - status = 'OOPS' - print('{0:<8d} {1:4s} {2:8.5f} -> {3:5s} {4:8.5f} {5:4d} - {6}'.format( - pix, truetype, truez, objtype, z, zwarn, status)) - - print("--------------------------------------------------") - - -if __name__ == '__main__': - integration_test() diff --git a/deprecated/py/desispec/test/old_integration_test.py b/deprecated/py/desispec/test/old_integration_test.py deleted file mode 100644 index 6a5268587..000000000 --- a/deprecated/py/desispec/test/old_integration_test.py +++ /dev/null @@ -1,407 +0,0 @@ -""" -Run integration tests from pixsim through redshifts - -python -m desispec.test.old_integration_test -""" - -from __future__ import absolute_import, print_function -import os -import time - -import numpy as np -from astropy.io import fits - -try: - from scipy import constants - C_LIGHT = constants.c/1000.0 -except TypeError: # This can happen during documentation builds. - C_LIGHT = 299792458.0/1000.0 - -from ..util import runcmd -from .. 
import io -from ..qa import QA_Exposure -from ..database.redshift import get_options, setup_db, load_redrock - -from desiutil.log import get_logger - -#- prevent nose from trying to run this test since it takes too long -__test__ = False - -def check_env(): - """Check required environment variables. - - Raises: - RuntimeError if any script fails - """ - log = get_logger() - #- template locations - missing_env = False - if 'DESI_BASIS_TEMPLATES' not in os.environ: - log.warning('missing $DESI_BASIS_TEMPLATES needed for simulating spectra') - missing_env = True - - if not os.path.isdir(os.getenv('DESI_BASIS_TEMPLATES')): - log.warning('missing $DESI_BASIS_TEMPLATES directory') - log.warning('e.g. see NERSC:/project/projectdirs/desi/spectro/templates/basis_templates/v2.2') - missing_env = True - - for name in ( - 'DESI_SPECTRO_SIM', 'DESI_SPECTRO_REDUX', 'PIXPROD', 'SPECPROD'): - if name not in os.environ: - log.warning("missing ${0}".format(name)) - missing_env = True - - if missing_env: - log.warning("Why are these needed?") - log.warning(" Simulations written to $DESI_SPECTRO_SIM/$PIXPROD/") - log.warning(" Raw data read from $DESI_SPECTRO_DATA/") - log.warning(" Spectro pipeline output written to $DESI_SPECTRO_REDUX/$SPECPROD/") - log.warning(" Templates are read from $DESI_BASIS_TEMPLATES") - log.critical("missing env vars; exiting without running pipeline") - raise RuntimeError("missing env vars; exiting without running pipeline") - - #- Override $DESI_SPECTRO_DATA to match $DESI_SPECTRO_SIM/$PIXPROD - os.environ['DESI_SPECTRO_DATA'] = os.path.join(os.getenv('DESI_SPECTRO_SIM'), os.getenv('PIXPROD')) - - -#- TODO: fix usage of night to be something other than today -def integration_test(night=None, nspec=5, clobber=False): - """Run an integration test from raw data simulations through redshifts. 
- - Args: - night (str, optional): YEARMMDD, defaults to current night - nspec (int, optional): number of spectra to include - clobber (bool, optional): rerun steps even if outputs already exist - - Raises: - RuntimeError if any script fails - """ - from desiutil.iers import freeze_iers - freeze_iers() - - log = get_logger() - #- YEARMMDD string, rolls over at noon not midnight - #- Simulate 8 years ago, prior to start of survey - if night is None: - night = time.strftime('%Y%m%d', time.localtime(time.time()-12*3600-(8*365*24*3600))) - - #- check for required environment variables - check_env() - - #- parameter dictionary that will later be used for formatting commands - params = dict(night=night, nspec=nspec) - - #----- - #- Input fibermaps, spectra, and pixel-level raw data - # raw_dict = {0: 'flat', 1: 'arc', 2: 'dark'} - programs = ('flat', 'arc', 'dark') - channels = ('b', 'r', 'z') - cameras = ('b0', 'r0', 'z0') - # for expid, program in raw_dict.items(): - for expid, program in enumerate(programs): - cmd = "newexp-random --program {program} --nspec {nspec} --night {night} --expid {expid}".format( - expid=expid, program=program, **params) - - fibermap = io.findfile('fibermap', night, expid) - simspec = '{}/simspec-{:08d}.fits'.format(os.path.dirname(fibermap), expid) - inputs = [] - outputs = [fibermap, simspec] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('pixsim newexp failed for {} exposure {}'.format(program, expid)) - - cmd = "pixsim --nspec {nspec} --night {night} --expid {expid}".format(expid=expid, **params) - inputs = [fibermap, simspec] - outputs = [fibermap.replace('fibermap-', 'simpix-'), ] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('pixsim failed for {} exposure {}'.format(program, expid)) - - #----- - #- Preproc - - for expid, program in enumerate(programs): - rawfile = io.findfile('desi', night, expid) - outdir = os.path.dirname(io.findfile('preproc', night, expid, 'b0')) - cmd = "desi_preproc --cameras b0,r0,z0 --infile {} --outdir {} --ncpu 1".format(rawfile, outdir) - - inputs = [rawfile,] - outputs = list() - for camera in cameras: - outputs.append(io.findfile('preproc', night, expid, camera)) - - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('preproc failed for expid {}'.format(expid)) - - #----- - #- Extract - - waverange = dict(b="3570,5940,1.0", r="5630,7740,1.0", z="7440,9830,1.0") - for expid, program in enumerate(programs): - for ic, channel in enumerate(channels): - pixfile = io.findfile('preproc', night, expid, cameras[ic]) - fiberfile = io.findfile('fibermap', night, expid) - psffile = '{}/data/specpsf/psf-{}.fits'.format(os.getenv('DESIMODEL'), channel) - framefile = io.findfile('frame', night, expid, cameras[ic]) - # cmd = "exspec -i {pix} -p {psf} --specmin 0 --nspec {nspec} -w {wave} -o {frame}".format( - # pix=pixfile, psf=psffile, wave=waverange[channel], frame=framefile, **params) - cmd = "desi_extract_spectra -i {pix} -p {psf} -f {fibermap} --specmin 0 --nspec {nspec} -o {frame}".format( - pix=pixfile, psf=psffile, frame=framefile, fibermap=fiberfile, **params) - - inputs = [pixfile, psffile, fiberfile] - outputs = [framefile,] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('extraction failed for {} expid {}'.format(cameras[ic], expid)) - - #----- - #- 
Fiber flat - expid = 0 - for ic, channel in enumerate(channels): - framefile = io.findfile('frame', night, expid, cameras[ic]) - fiberflat = io.findfile('fiberflat', night, expid, cameras[ic]) - fibermap = io.findfile('fibermap', night, expid) # for QA - qafile = io.findfile('qa_calib', night, expid, cameras[ic]) - qafig = io.findfile('qa_flat_fig', night, expid, cameras[ic]) - cmd = "desi_compute_fiberflat --infile {frame} --outfile {fiberflat} --qafile {qafile} --qafig {qafig}".format( - frame=framefile, fiberflat=fiberflat, qafile=qafile, qafig=qafig, **params) - inputs = [framefile,fibermap,] - outputs = [fiberflat,qafile,qafig,] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('fiberflat failed for '+cameras[ic]) - - #----- - #- Sky model - flat_expid = 0 - expid = 2 - for ic, channel in enumerate(channels): - framefile = io.findfile('frame', night, expid, cameras[ic]) - fibermap = io.findfile('fibermap', night, expid) - fiberflat = io.findfile('fiberflat', night, flat_expid, cameras[ic]) - skyfile = io.findfile('sky', night, expid, cameras[ic]) - qafile = io.findfile('qa_data', night, expid, cameras[ic]) - qafig = io.findfile('qa_sky_fig', night, expid, cameras[ic]) - cmd="desi_compute_sky --infile {frame} --fiberflat {fiberflat} --outfile {sky} --qafile {qafile} --qafig {qafig}".format( - frame=framefile, fiberflat=fiberflat, sky=skyfile, qafile=qafile, qafig=qafig, **params) - inputs = [framefile, fibermap, fiberflat] - outputs = [skyfile, qafile, qafig,] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('sky model failed for '+cameras[ic]) - - - #----- - #- Fit standard stars - if 'STD_TEMPLATES' in os.environ: - std_templates = os.getenv('STD_TEMPLATES') - else: - std_templates = os.getenv('DESI_ROOT')+'/spectro/templates/star_templates/v1.1/star_templates_v1.1.fits' - - stdstarfile = io.findfile('stdstars', night, expid, spectrograph=0) - flats = list() - frames = list() - skymodels = list() - for ic, channel in enumerate(channels): - frames.append( io.findfile('frame', night, expid, cameras[ic]) ) - flats.append( io.findfile('fiberflat', night, flat_expid, cameras[ic]) ) - skymodels.append( io.findfile('sky', night, expid, cameras[ic]) ) - - frames = ' '.join(frames) - flats = ' '.join(flats) - skymodels = ' '.join(skymodels) - - cmd = """desi_fit_stdstars \ - --frames {frames} \ - --fiberflats {flats} \ - --skymodels {skymodels} \ - --starmodels {std_templates} \ - -o {stdstars}""".format( - frames=frames, flats=flats, skymodels=skymodels, - std_templates=std_templates, stdstars=stdstarfile) - - inputs = [fibermap, std_templates] - outputs = [stdstarfile,] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('fitting stdstars failed') - - - #----- - #- Flux calibration - for ic, channel in enumerate(channels): - framefile = io.findfile('frame', night, expid, cameras[ic]) - fibermap = io.findfile('fibermap', night, expid) - fiberflat = io.findfile('fiberflat', night, flat_expid, cameras[ic]) - skyfile = io.findfile('sky', night, expid, cameras[ic]) - calibfile = io.findfile('calib', night, expid, cameras[ic]) - qafile = io.findfile('qa_data', night, expid, cameras[ic]) - qafig = io.findfile('qa_flux_fig', night, expid, cameras[ic]) - - #- Compute flux calibration vector - cmd = """desi_compute_fluxcalibration \ - --infile {frame} --fiberflat {fiberflat} --sky {sky} \ - 
--models {stdstars} --outfile {calib} --qafile {qafile} --qafig {qafig}""".format( - frame=framefile, fiberflat=fiberflat, sky=skyfile, - stdstars=stdstarfile, calib=calibfile, qafile=qafile, qafig=qafig - ) - inputs = [framefile, fibermap, fiberflat, skyfile, stdstarfile] - outputs = [calibfile, qafile, qafig] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('flux calibration failed for '+cameras[ic]) - - #- Apply the flux calibration to write a cframe file - cframefile = io.findfile('cframe', night, expid, cameras[ic]) - cmd = """desi_process_exposure \ - --infile {frame} --fiberflat {fiberflat} --sky {sky} --calib {calib} \ - --outfile {cframe}""".format(frame=framefile, fibermap=fibermap, - fiberflat=fiberflat, sky=skyfile, calib=calibfile, cframe=cframefile) - inputs = [framefile, fiberflat, skyfile, calibfile] - outputs = [cframefile, ] - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('combining calibration steps failed for '+cameras[ic]) - - #----- - #- Collate QA - # Collate data QA - program2flavor = dict(arc='arc', flat='flat') - for program in ('dark', 'gray', 'bright', 'elg', 'lrg', 'qso', 'bgs', 'mws'): - program2flavor[program] = 'science' - - expid = 2 - qafile = io.findfile('qa_data_exp', night, expid) - if clobber or not os.path.exists(qafile): - flavor = program2flavor[programs[expid]] - qaexp_data = QA_Exposure(expid, night, flavor) # Removes camera files - io.write_qa_exposure(os.path.splitext(qafile)[0], qaexp_data) - if not os.path.exists(qafile): - raise RuntimeError('FAILED data QA_Exposure({},{}, ...) -> {}'.format(expid, night, qafile)) - # Collate calib QA - calib_expid = [0,1] - for expid in calib_expid: - qafile = io.findfile('qa_calib_exp', night, expid) - if clobber or not os.path.exists(qafile): - qaexp_calib = QA_Exposure(expid, night, programs[expid]) - io.write_qa_exposure(os.path.splitext(qafile)[0], qaexp_calib) - if not os.path.exists(qafile): - raise RuntimeError('FAILED calib QA_Exposure({},{}, ...) -> {}'.format(expid, night, qafile)) - - #----- - #- Regroup cframe -> spectra - expid = 2 - inputs = list() - for camera in cameras: - inputs.append( io.findfile('cframe', night, expid, camera) ) - - outputs = list() - fibermap = io.read_fibermap(io.findfile('fibermap', night, expid)) - from desimodel.footprint import radec2pix - nside=64 - pixels = np.unique(radec2pix(nside, fibermap['TARGET_RA'], fibermap['TARGET_DEC'])) - for pix in pixels: - outputs.append( io.findfile('spectra', groupname=pix) ) - - cmd = "desi_group_spectra" - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('spectra regrouping failed') - - #----- - #- Redshifts! 
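The redshift loop below, like every step of this test, funnels its commands through runcmd. An illustrative stand-in (not the real desispec.util.runcmd API) for the inputs/outputs/clobber contract the test relies on::

    import os
    import subprocess

    def runcmd_sketch(cmd, inputs=(), outputs=(), clobber=False):
        """Skip when outputs exist, fail on missing inputs, report success."""
        if not clobber and outputs and all(os.path.exists(p) for p in outputs):
            return None, True                      # nothing to do
        if not all(os.path.exists(p) for p in inputs):
            return None, False                     # missing prerequisites
        result = subprocess.run(cmd, shell=True)
        success = result.returncode == 0 and all(os.path.exists(p) for p in outputs)
        return result, success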
- for pix in pixels: - specfile = io.findfile('spectra', groupname=pix) - redrockfile = io.findfile('redrock', groupname=pix) - inputs = [specfile, ] - outputs = [redrockfile, ] - cmd = "rrdesi {} --outfile {}".format(specfile, redrockfile) - result, success = runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - if not success: - raise RuntimeError('rrdesi failed for healpixel {}'.format(pix)) - - # - # Load redshifts into database - # - options = get_options('--overwrite', '--filename', 'dailytest.db', - os.path.join(os.environ['DESI_SPECTRO_REDUX'], - os.environ['SPECPROD'])) - postgresql = setup_db(options) - load_redrock(options.datapath) - # ztruth QA - # qafile = io.findfile('qa_ztruth', night) - # qafig = io.findfile('qa_ztruth_fig', night) - # cmd = "desi_qa_zfind --night {night} --qafile {qafile} --qafig {qafig} --verbose".format( - # night=night, qafile=qafile, qafig=qafig) - # inputs = [] - # outputs = [qafile, qafig] - # result, success runcmd(cmd, inputs=inputs, outputs=outputs, clobber=clobber) - # if not success: - # raise RuntimeError('redshift QA failed for night '+night) - - #----- - #- Did it work? - #- (this combination of fibermap, simspec, and redrock is a pain) - simdir = os.path.dirname(io.findfile('fibermap', night=night, expid=expid)) - simspec = '{}/simspec-{:08d}.fits'.format(simdir, expid) - siminfo = fits.getdata(simspec, 'TRUTH') - try: - elginfo = fits.getdata(simspec, 'TRUTH_ELG') - except: - elginfo = None - - print() - print("--------------------------------------------------") - print("Pixel True z -> Class z zwarn") - # print("3338p190 SKY 0.00000 -> QSO 1.60853 12 - ok") - for pix in pixels: - redrock = fits.getdata(io.findfile('redrock', groupname=pix)) - for i in range(len(redrock)): - objtype = redrock['SPECTYPE'][i] - z, zwarn = redrock['Z'][i], redrock['ZWARN'][i] - - j = np.where(fibermap['TARGETID'] == redrock['TARGETID'][i])[0][0] - truetype = siminfo['OBJTYPE'][j] - oiiflux = 0.0 - if truetype == 'ELG': - k = np.where(elginfo['TARGETID'] == redrock['TARGETID'][i])[0][0] - oiiflux = elginfo['OIIFLUX'][k] - - truez = siminfo['REDSHIFT'][j] - dv = C_LIGHT*(z-truez)/(1+truez) - if truetype == 'SKY' and zwarn > 0: - status = 'ok' - elif truetype == 'ELG' and zwarn > 0 and oiiflux < 8e-17: - status = 'ok ([OII] flux {:.2g})'.format(oiiflux) - elif zwarn == 0: - if truetype == 'LRG' and objtype == 'GALAXY' and abs(dv) < 150: - status = 'ok' - elif truetype == 'ELG' and objtype == 'GALAXY': - if abs(dv) < 150: - status = 'ok' - elif oiiflux < 8e-17: - status = 'ok ([OII] flux {:.2g})'.format(oiiflux) - else: - status = 'OOPS ([OII] flux {:.2g})'.format(oiiflux) - elif truetype == 'QSO' and objtype == 'QSO' and abs(dv) < 750: - status = 'ok' - elif truetype in ('STD', 'FSTD') and objtype == 'STAR': - status = 'ok' - else: - status = 'OOPS' - else: - status = 'OOPS' - print('{0:<8d} {1:4s} {2:8.5f} -> {3:6s} {4:8.5f} {5:4d} - {6}'.format( - pix, truetype, truez, objtype, z, zwarn, status)) - - print("--------------------------------------------------") - -if __name__ == '__main__': - from sys import exit - status = 0 - try: - integration_test() - except RuntimeError: - status = 1 - exit(status) diff --git a/deprecated/py/desispec/test/test_qa.py b/deprecated/py/desispec/test/test_qa.py deleted file mode 100644 index 6e1f32986..000000000 --- a/deprecated/py/desispec/test/test_qa.py +++ /dev/null @@ -1,318 +0,0 @@ -""" -tests desispec.sky -""" - -import unittest -import pdb - -import numpy as np -import os -from desispec.frame import Frame 
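A worked example of the velocity-offset cut used in the truth comparisons above, dv = c*(z - z_true)/(1 + z_true) with c in km/s::

    C_LIGHT = 299792.458                           # km/s
    z_true, z_fit = 0.85000, 0.85060               # hypothetical redshifts
    dv = C_LIGHT * (z_fit - z_true) / (1.0 + z_true)
    print('dv = {:.1f} km/s'.format(dv))           # 97.2 -> passes |dv| < 150 for galaxies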
-#from desispec.qa import QA_Frame, QA_Exposure, QA_Brick, QA_Prod -from desispec.qa.qa_frame import QA_Frame -from desispec.qa.qa_exposure import QA_Exposure -from desispec.qa.qa_brick import QA_Brick -from desispec.qa.qa_prod import QA_Prod -from desispec.qa.qa_night import QA_Night -from desispec.io import write_qa_frame, write_qa_brick, load_qa_frame, write_qa_exposure, findfile, write_frame -from desispec.io import write_fiberflat, specprod_root -from desispec.test.util import get_frame_data, get_calib_from_frame, get_fiberflat_from_frame -#from uuid import uuid4 -from shutil import rmtree - -class TestQA(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.nspec = 6 - cls.nwave = 20 - cls.id = 1 - # Run - cls.nights = ['20160101']*2 + ['20160102']*2 - cls.expids = [1,2,3,4] - cls.cameras = ['b0','b1'] - # Files - cls.files_written = [] - # Paths - os.environ['DESI_SPECTRO_REDUX'] = os.environ['HOME'] - os.environ['SPECPROD'] = 'desi_test_qa' - cls.testDir = specprod_root() - # Files - cls.qafile_brick = cls.testDir+'/brick/3582m005/qa-3582m005.yaml' - cls.flux_pdf = cls.testDir+'/exposures/'+cls.nights[0]+'/{:08d}/qa-flux-{:08d}.pdf'.format(cls.id,cls.id) - cls.frame_pdf = cls.testDir+'/exposures/'+cls.nights[0]+'/{:08d}/qa-frame-{:08d}.pdf'.format(cls.id,cls.id) - # Files for exposure fibermap QA figure - cls.exp_fmap_plot = cls.testDir+'/test_exp_fibermap_plot.png' - - @classmethod - def tearDownClass(cls): - """Cleanup in case tests crashed and left files behind""" - for filename in cls.files_written: - if os.path.exists(filename): - os.remove(filename) - #testpath = os.path.normpath(os.path.dirname(filename)) - #if testpath != '.': - # os.removedirs(testpath) - if os.path.exists(cls.testDir): - rmtree(cls.testDir) - - def _make_frame(self, camera='b0', flavor='science', night=None, expid=None, nspec=3): - # Init - if night is None: - night = self.nights[0] - if expid is None: - expid = self.expids[0] - # Generate - frame = get_frame_data(nspec=nspec) - frame.meta = dict(CAMERA=camera, FLAVOR=flavor, NIGHT=night, EXPID=expid) - if flavor in ('arc', 'flat', 'zero', 'dark'): - frame.fibermap['OBJTYPE'] = 'CAL' - frame.fibermap['DESI_TARGET'] = 0 - - return frame - - def _write_flat_file(self, camera='b0', night=None, expid=None): - # Init - if night is None: - night = self.nights[0] - if expid is None: - expid = self.expids[0] - # Filename - frame_file = findfile('frame', night=night, expid=expid, specprod_dir=self.testDir, camera=camera) - fflat_file = findfile('fiberflat', night=night, expid=expid, specprod_dir=self.testDir, camera=camera) - # Frames - fb = self._make_frame(camera=camera, flavor='flat', nspec=10) - _ = write_frame(frame_file, fb) - self.files_written.append(frame_file) - # Fiberflats - ff = get_fiberflat_from_frame(fb) - write_fiberflat(fflat_file, ff) - self.files_written.append(fflat_file) - # Return - return frame_file, fflat_file - - def _write_flat_files(self): - for expid, night in zip(self.expids, self.nights): - for camera in self.cameras: - self._write_flat_file(camera=camera, night=night, expid=expid) - - def _write_qaframe(self, camera='b0', expid=1, night='20160101', ZPval=24., flavor='science'): - """Write QA data frame files""" - frm = self._make_frame(camera=camera, expid=expid, night=night, flavor=flavor) - qafrm = QA_Frame(frm) - # SKY - qafrm.init_skysub() - qafrm.qa_data['SKYSUB']['METRICS'] = {} - qafrm.qa_data['SKYSUB']['METRICS']['NSKY_FIB'] = 10 - # FLUX - qafrm.init_fluxcalib() - qafrm.qa_data['FLUXCALIB']['METRICS'] = {} - 
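The QA payload assembled in _write_qaframe below is a plain nested dict keyed as qa_data[QATYPE]['METRICS'][metric]; a standalone sketch using the same values the helper writes::

    example_qa = {'SKYSUB': {'METRICS': {'NSKY_FIB': 10}},
                  'FLUXCALIB': {'METRICS': {'ZP': 24.0, 'RMS_ZP': 0.05}}}
    assert example_qa['FLUXCALIB']['METRICS']['RMS_ZP'] == 0.05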
qafrm.qa_data['FLUXCALIB']['METRICS']['ZP'] = ZPval - qafrm.qa_data['FLUXCALIB']['METRICS']['RMS_ZP'] = 0.05 - # Outfile - qafile = findfile('qa_data', night=night, expid=expid, - specprod_dir=self.testDir, camera=camera) - # WRITE - write_qa_frame(qafile, qafrm) - self.files_written.append(qafile) - - # Generate frame too (for QA_Exposure) - frame = self._make_frame(camera=camera, flavor=flavor, night=night, expid=expid) - frame_file = findfile('frame', night=night, expid=expid, specprod_dir=self.testDir, camera=camera) - _ = write_frame(frame_file, frame) - self.files_written.append(frame_file) - # - return qafile - - def _write_qaframes(self, **kwargs): - """ Build the standard set of qaframes - and the accompanying frames for QA_Exposure - - Args: - **kwargs: passed to _write_qaframe - - Returns: - - """ - for expid, night in zip(self.expids, self.nights): - for camera in self.cameras: - self._write_qaframe(camera=camera, expid=expid, night=night, **kwargs) - - def _write_qabrick(self): - """Write a QA data brick file""" - qabrck = QA_Brick() - # REDROCK - qabrck.init_redrock() - qabrck.data['REDROCK']['METRICS'] = {} - qabrck.data['REDROCK']['METRICS']['NFAIL'] = 10 - write_qa_brick(self.qafile_brick, qabrck) - self.files_written.append(self.qafile_brick) - - def test_init_qa_frame(self): - #- Simple Init call - qafrm1 = QA_Frame(self._make_frame(flavor='science')) - assert qafrm1.flavor == 'science' - - def test_init_qa_fiberflat(self): - #- Init FiberFlat dict - qafrm = QA_Frame(self._make_frame(flavor='flat')) - qafrm.init_fiberflat() - assert qafrm.qa_data['FIBERFLAT']['PARAMS']['MAX_RMS'] > 0. - - #- ReInit FiberFlat dict - qafrm.init_fiberflat(re_init=True) - assert qafrm.qa_data['FIBERFLAT']['PARAMS']['MAX_RMS'] > 0. - - def test_init_qa_fluxcalib(self): - #- Init FluxCalib dict - qafrm = QA_Frame(self._make_frame(flavor='science')) - qafrm.init_fluxcalib() - assert qafrm.qa_data['FLUXCALIB']['PARAMS']['MAX_ZP_OFF'] > 0. - - #- ReInit FluxCalib dict - qafrm.init_fluxcalib(re_init=True) - assert qafrm.qa_data['FLUXCALIB']['PARAMS']['MAX_ZP_OFF'] > 0. - - def test_init_qa_skysub(self): - #- Init SkySub dict - qafrm = QA_Frame(self._make_frame(flavor='science')) - qafrm.init_skysub() - assert qafrm.qa_data['SKYSUB']['PARAMS']['PCHI_RESID'] > 0. - - #- ReInit SkySub dict - qafrm.init_skysub(re_init=True) - assert qafrm.qa_data['SKYSUB']['PARAMS']['PCHI_RESID'] > 0. - - def test_qa_frame_write_load_data(self): - # Write - frm0 = self._make_frame() - qafrm0 = QA_Frame(frm0) - # Write - outfile = findfile('qa_data', night=self.nights[0], expid=self.expids[0], - specprod_dir=self.testDir, camera='b0') - write_qa_frame(outfile, qafrm0) - self.files_written.append(outfile) - # Load - qafrm2 = load_qa_frame(outfile, frame_meta=frm0.meta) - assert qafrm2.night == qafrm0.night - - - def test_init_qa_exposure(self): - """Test simple init. 
- """ - from os import environ - cache_env = {'SPECPROD': None, 'DESI_SPECTRO_REDUX': None} - for k in cache_env: - if k in environ: - cache_env[k] = environ[k] - environ[k] = './' - qaexp = QA_Exposure(1, '20150211', flavor='arc') - self.assertEqual(qaexp.expid, 1) - for k in cache_env: - if cache_env[k] is None: - del environ[k] - else: - environ[k] = cache_env[k] - - def test_qa_exposure_load_write_data(self): - #- Test loading data - self._write_qaframes() - expid, night = self.expids[0], self.nights[0] - qaexp = QA_Exposure(expid, night, specprod_dir=self.testDir) - assert 'b0' in qaexp.data['frames'] - assert 'b1' in qaexp.data['frames'] - assert qaexp.flavor == 'science' - # Write - qafile_exp_file = self.testDir+'/exposures/'+night+'/{:08d}/qa-{:08d}'.format(self.id,self.id) - write_qa_exposure(qafile_exp_file, qaexp) - self.files_written.append(qafile_exp_file) - - def test_exposure_fibermap_plot(self): - from desispec.qa.qa_plots import exposure_fiberflat - self._write_flat_files() - exposure_fiberflat('b', self.expids[0], 'meanflux', outfile=self.exp_fmap_plot) - self.files_written.append(self.exp_fmap_plot) - - """ - # This needs to run as a script for the figure generation to pass Travis.. - def test_qa_exposure_fluxcalib(self): - #- Perform fluxcalib QA on Exposure (including figure) - self._write_qaframes() - qaexp = QA_Exposure(1, self.night, specprod_dir=self.testDir, - flavor='dark') - qaexp.fluxcalib(self.flux_pdf) - """ - - def test_init_qa_brick(self): - #- Simple Init calls - qabrck = QA_Brick(name='tst_brick') - assert qabrck.brick_name == 'tst_brick' - # - qabrck.init_redrock() - assert qabrck.data['REDROCK']['PARAMS']['MAX_NFAIL'] > 0 - - def test_init_qa_prod(self): - self._write_qaframes() - qaprod = QA_Prod(self.testDir) - # Load - qaprod.make_frameqa() - _ = qaprod.slurp_nights(write_nights=True) - qaprod.build_data() - # Build a Table - tbl = qaprod.get_qa_table('FLUXCALIB', 'RMS_ZP') - # Test - assert len(tbl) == 8 - assert tbl['FLAVOR'][0] == 'science' - assert len(qaprod.qa_nights) == 2 - assert '20160101' in qaprod.mexp_dict.keys() - assert isinstance(qaprod.data, dict) - # Load from night JSON QA dicts - qaprod2 = QA_Prod(self.testDir) - qaprod2.load_data() - tbl2 = qaprod.get_qa_table('FLUXCALIB', 'RMS_ZP') - assert len(tbl2) == 8 - - def test_init_qa_night(self): - self._write_qaframes() # Generate a set of science QA frames - night = self.nights[0] - qanight = QA_Night(night, specprod_dir=self.testDir) - # Load - qanight.make_frameqa() - _ = qanight.slurp() - qanight.build_data() - # Build an empty Table - tbl = qanight.get_qa_table('FIBERFLAT', 'CHI2PDF') - assert len(tbl) == 0 - # Build a useful Table - tbl2 = qanight.get_qa_table('FLUXCALIB', 'RMS_ZP') - # Test - assert len(tbl2) == 4 - assert tbl2['FLAVOR'][0] == 'science' - # More tests - assert len(qanight.qa_exps) == 2 - assert night in qanight.mexp_dict.keys() - assert isinstance(qanight.data, dict) - - def test_qa_frame_plot(self): - from desispec.qa import qa_plots - from desispec.qa import qa_frame - # Frame - frame = get_frame_data(500) - # Load calib - fluxcalib = get_calib_from_frame(frame) - # QA Frame - tdict = {} - tdict['20190829'] = {} - dint = 20 - tdict['20190829'][dint] = {} - tdict['20190829'][dint]['flavor'] = 'science' - tdict['20190829'][dint]['b'] = {} - tdict['20190829'][dint]['b']['FLUXCALIB'] = {} - tdict['20190829'][dint]['b']['FLUXCALIB']['METRICS'] = {} - tdict['20190829'][dint]['b']['FLUXCALIB']['METRICS']['BLAH'] = 1 - qaframe = qa_frame.QA_Frame(tdict) - # Plot - 
qa_plots.frame_fluxcalib(self.frame_pdf, qaframe, frame, fluxcalib) - - def runTest(self): - pass diff --git a/deprecated/py/desispec/test/test_ql.py b/deprecated/py/desispec/test/test_ql.py deleted file mode 100644 index 8d0738e90..000000000 --- a/deprecated/py/desispec/test/test_ql.py +++ /dev/null @@ -1,218 +0,0 @@ -""" -Test capabilities of QuickLook pipeline - -python -m desispec.test.test_ql -""" -import os, sys -import shutil -from uuid import uuid4 -import unittest -import yaml -import numpy as np -from desispec.util import runcmd -from desispec.io.raw import write_raw -from desispec.io import empty_fibermap -from desispec.io.fibermap import write_fibermap -import datetime -import pytz -from importlib import resources - -class TestQL(unittest.TestCase): - @classmethod - def setUp(cls): - cls.program = program = 'dark' - cls.flavor = flavor = 'bias' - cls.night = night = '20150105' - cls.camera = camera = 'r0' - cls.expid = expid = 314 - cls.psfExpid = psfExpid = 313 - cls.flatExpid = flatExpid = 312 - cls.templateExpid = templateExpid = 311 - cls.nspec = nspec = 5 - cls.exptime = exptime = 100 - - #- Setup environment and override default environment variables - - #- python 2.7 location: - cls.topDir = os.path.dirname( # top-level - os.path.dirname( # py/ - os.path.dirname( # desispec/ - os.path.dirname(os.path.abspath(__file__)) # test/ - ) - ) - ) - cls.binDir = os.path.join(cls.topDir,'bin') - if not os.path.isdir(cls.binDir): - #- python 3.x setup.py test location: - cls.topDir = os.path.dirname( # top-level - os.path.dirname( # build/ - os.path.dirname( # lib/ - os.path.dirname( # desispec/ - os.path.dirname(os.path.abspath(__file__)) # test/ - ) - ) - ) - ) - cls.binDir = os.path.join(cls.topDir,'bin') - - #- last attempt - if not os.path.isdir(cls.binDir): - cls.topDir = os.getcwd() - cls.binDir = os.path.join(cls.topDir, 'bin') - - if not os.path.isdir(cls.binDir): - raise RuntimeError('Unable to auto-locate desispec/bin from {}'.format(__file__)) - - id = uuid4().hex - cls.fibermapfile = 'fibermap-'+id+'.fits' - cls.framefile = 'frame-'+id+'.fits' - - cls.testDir = testDir = os.path.join(os.environ['HOME'],'ql_test_io') - datanightDir = os.path.join(testDir,night) - dataDir = os.path.join(datanightDir,'{:08d}'.format(expid)) - expDir = os.path.join(testDir,'exposures') - expnightDir = os.path.join(expDir,night) - reduxDir = os.path.join(expnightDir,'{:08d}'.format(expid)) - calibDir = os.path.join(testDir, 'ql_calib') - configDir = os.path.join(testDir, 'ql_config') - os.environ['QL_CALIB_DIR'] = calibDir - os.environ['QL_CONFIG_DIR'] = configDir - if not os.path.exists(testDir): - os.makedirs(testDir) - os.makedirs(datanightDir) - os.makedirs(dataDir) - os.makedirs(expDir) - os.makedirs(expnightDir) - os.makedirs(reduxDir) - os.makedirs(calibDir) - os.makedirs(configDir) - - #- Write dummy configuration and input files to test merging - configdict = {'name': 'Test Configuration', - 'Program': program, - 'Flavor': flavor, - 'PSFExpid': psfExpid, - 'PSFType': 'psf', - 'FiberflatExpid': flatExpid, - 'TemplateExpid': templateExpid, - 'TemplateNight': night, - 'WritePreprocfile': False, - 'WriteSkyModelfile': False, - 'WriteIntermediatefiles': False, - 'WriteStaticPlots': False, - 'Debuglevel': 20, - 'UseResolution': False, - 'Period': 5.0, - 'Timeout': 120.0, - 'Pipeline': ['Initialize','Preproc'], - 'Algorithms': {'Initialize':{ - 'QA':{'Check_HDUs':{'PARAMS':{}} - }}, - 'Preproc':{ - 
diff --git a/deprecated/py/desispec/test/test_ql.py b/deprecated/py/desispec/test/test_ql.py
deleted file mode 100644
index 8d0738e90..000000000
--- a/deprecated/py/desispec/test/test_ql.py
+++ /dev/null
@@ -1,218 +0,0 @@
-"""
-Test capabilities of QuickLook pipeline
-
-python -m desispec.test.test_ql
-"""
-import os, sys
-import shutil
-from uuid import uuid4
-import unittest
-import yaml
-import numpy as np
-from desispec.util import runcmd
-from desispec.io.raw import write_raw
-from desispec.io import empty_fibermap
-from desispec.io.fibermap import write_fibermap
-import datetime
-import pytz
-from importlib import resources
-
-class TestQL(unittest.TestCase):
-    @classmethod
-    def setUp(cls):
-        cls.program = program = 'dark'
-        cls.flavor = flavor = 'bias'
-        cls.night = night = '20150105'
-        cls.camera = camera = 'r0'
-        cls.expid = expid = 314
-        cls.psfExpid = psfExpid = 313
-        cls.flatExpid = flatExpid = 312
-        cls.templateExpid = templateExpid = 311
-        cls.nspec = nspec = 5
-        cls.exptime = exptime = 100
-
-        #- Setup environment and override default environment variables
-
-        #- python 2.7 location:
-        cls.topDir = os.path.dirname(      # top-level
-            os.path.dirname(               # py/
-                os.path.dirname(           # desispec/
-                    os.path.dirname(os.path.abspath(__file__))  # test/
-                )
-            )
-        )
-        cls.binDir = os.path.join(cls.topDir,'bin')
-        if not os.path.isdir(cls.binDir):
-            #- python 3.x setup.py test location:
-            cls.topDir = os.path.dirname(      # top-level
-                os.path.dirname(               # build/
-                    os.path.dirname(           # lib/
-                        os.path.dirname(       # desispec/
-                            os.path.dirname(os.path.abspath(__file__))  # test/
-                        )
-                    )
-                )
-            )
-            cls.binDir = os.path.join(cls.topDir,'bin')
-
-        #- last attempt
-        if not os.path.isdir(cls.binDir):
-            cls.topDir = os.getcwd()
-            cls.binDir = os.path.join(cls.topDir, 'bin')
-
-        if not os.path.isdir(cls.binDir):
-            raise RuntimeError('Unable to auto-locate desispec/bin from {}'.format(__file__))
-
-        id = uuid4().hex
-        cls.fibermapfile = 'fibermap-'+id+'.fits'
-        cls.framefile = 'frame-'+id+'.fits'
-
-        cls.testDir = testDir = os.path.join(os.environ['HOME'],'ql_test_io')
-        datanightDir = os.path.join(testDir,night)
-        dataDir = os.path.join(datanightDir,'{:08d}'.format(expid))
-        expDir = os.path.join(testDir,'exposures')
-        expnightDir = os.path.join(expDir,night)
-        reduxDir = os.path.join(expnightDir,'{:08d}'.format(expid))
-        calibDir = os.path.join(testDir, 'ql_calib')
-        configDir = os.path.join(testDir, 'ql_config')
-        os.environ['QL_CALIB_DIR'] = calibDir
-        os.environ['QL_CONFIG_DIR'] = configDir
-        if not os.path.exists(testDir):
-            os.makedirs(testDir)
-            os.makedirs(datanightDir)
-            os.makedirs(dataDir)
-            os.makedirs(expDir)
-            os.makedirs(expnightDir)
-            os.makedirs(reduxDir)
-            os.makedirs(calibDir)
-            os.makedirs(configDir)
-
-        #- Write dummy configuration and input files to test merging
-        configdict = {'name': 'Test Configuration',
-                      'Program': program,
-                      'Flavor': flavor,
-                      'PSFExpid': psfExpid,
-                      'PSFType': 'psf',
-                      'FiberflatExpid': flatExpid,
-                      'TemplateExpid': templateExpid,
-                      'TemplateNight': night,
-                      'WritePreprocfile': False,
-                      'WriteSkyModelfile': False,
-                      'WriteIntermediatefiles': False,
-                      'WriteStaticPlots': False,
-                      'Debuglevel': 20,
-                      'UseResolution': False,
-                      'Period': 5.0,
-                      'Timeout': 120.0,
-                      'Pipeline': ['Initialize','Preproc'],
-                      'Algorithms': {'Initialize':{
-                          'QA':{'Check_HDUs':{'PARAMS':{}}
-                                }},
-                          'Preproc':{
-                              'QA':{'Bias_From_Overscan':{'PARAMS':{'BIAS_AMP_NORMAL_RANGE':[-100.0,100.0],'BIAS_AMP_WARN_RANGE':[-200.0,200.0]}},
-                                    'Get_RMS':{'PARAMS':{'PERCENTILES':[68.2,95.4,99.7],'NOISE_AMP_NORMAL_RANGE':[-1.0,1.0],'NOISE_AMP_WARN_RANGE':[-2.0,2.0]}},
-                                    'Count_Pixels':{'PARAMS':{'CUTPIX':500,'LITFRAC_NORMAL_RANGE':[-0.1,0.1],'LITFRAC_WARN_RANGE':[-0.2,0.2]}}}}}
-                      }
-        with open('{}/test_config.yaml'.format(testDir),'w') as config:
-            yaml.dump(configdict,config)
-        cls.configfile = '{}/test_config.yaml'.format(testDir)
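The configuration just written is plain YAML, so it can be read straight back with PyYAML; a sketch reusing the cls.configfile path from the setup above (the assertions mirror keys from the configdict, nothing else is assumed):

    import yaml

    with open(cls.configfile) as fh:
        cfg = yaml.safe_load(fh)
    # spot-check the structure produced by yaml.dump above
    assert cfg['Pipeline'] == ['Initialize', 'Preproc']
    assert 'Bias_From_Overscan' in cfg['Algorithms']['Preproc']['QA']
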
-
-        #- Generate raw file
-        rawfile = os.path.join(dataDir,'desi-00000314.fits.fz')
-        raw_hdr = {}
-        raw_hdr['DATE-OBS'] = '2015-01-05T08:17:03.988'
-        raw_hdr['NIGHT'] = night
-        raw_hdr['PROGRAM'] = program
-        raw_hdr['FLAVOR'] = flavor
-        raw_hdr['CAMERA'] = camera
-        raw_hdr['EXPID'] = expid
-        raw_hdr['EXPTIME'] = exptime
-        raw_hdr['DOSVER'] = 'SIM'
-        raw_hdr['FEEVER'] = 'SIM'
-        raw_hdr['DETECTOR'] = 'SIM'
-        raw_hdr['PRESECA'] = '[1:4,1:2048]'
-        raw_hdr['DATASECA'] = '[5:2052,1:2048]'
-        raw_hdr['BIASSECA'] = '[2053:2102,1:2048]'
-        raw_hdr['CCDSECA'] = '[1:2048,1:2048]'
-        raw_hdr['PRESECB'] = '[4201:4204,1:2048]'
-        raw_hdr['DATASECB'] = '[2153:4200,1:2048]'
-        raw_hdr['BIASSECB'] = '[2103:2152,1:2048]'
-        raw_hdr['CCDSECB'] = '[2049:4096,1:2048]'
-        raw_hdr['PRESECC'] = '[1:4,2049:4096]'
-        raw_hdr['DATASECC'] = '[5:2052,2049:4096]'
-        raw_hdr['BIASSECC'] = '[2053:2102,2049:4096]'
-        raw_hdr['CCDSECC'] = '[1:2048,2049:4096]'
-        raw_hdr['PRESECD'] = '[4201:4204,2049:4096]'
-        raw_hdr['DATASECD'] = '[2153:4200,2049:4096]'
-        raw_hdr['BIASSECD'] = '[2103:2152,2049:4096]'
-        raw_hdr['CCDSECD'] = '[2049:4096,2049:4096]'
-        raw_hdr['GAINA'] = 1.0
-        raw_hdr['GAINB'] = 1.0
-        raw_hdr['GAINC'] = 1.0
-        raw_hdr['GAIND'] = 1.0
-        raw_hdr['RDNOISEA'] = 3.0
-        raw_hdr['RDNOISEB'] = 3.0
-        raw_hdr['RDNOISEC'] = 3.0
-        raw_hdr['RDNOISED'] = 3.0
-
-        primary_header={'PROGRAM':program}
-        data=np.zeros((4096,4204))+200.
-        raw_data=data.astype(int)
-        write_raw(rawfile,raw_data,raw_hdr,primary_header=primary_header)
-
-        #- Generate fibermap file
-        fibermapfile = os.path.join(dataDir,'fibermap-00000314.fits')
-        fibermap = empty_fibermap(nspec)
-        write_fibermap(fibermapfile,fibermap)
-
-        #- Generate calib data
-        for camera in ['b0', 'r0', 'z0']:
-            #- Fiberflat has to exist but can be a dummy file
-            filename = '{}/fiberflat-{}.fits'.format(calibDir, camera)
-            fx = open(filename, 'w'); fx.write('fiberflat file'); fx.close()
-
-            #- PSF has to be real file
-            psffile = '{}/psf-{}.fits'.format(calibDir, camera)
-            example_psf = resources.files('desispec').joinpath(f'test/data/ql/psf-{camera}.fits')
-            shutil.copy(example_psf, psffile)
-
-        #- Copy test calibration-data.yaml file
-        specdir = os.path.join(calibDir, 'spec', 'sp0')
-        if not os.path.isdir(specdir):
-            os.makedirs(specdir)
-        for c in "brz":
-            shutil.copy(str(resources.files('desispec').joinpath(f'test/data/ql/{c}0.yaml')), os.path.join(specdir, f"{c}0.yaml"))
-
-        #- Set calibration environment variable
-        os.environ['DESI_SPECTRO_CALIB'] = calibDir
-
-
-    #- Clean up test files and directories if they exist
-    @classmethod
-    def tearDown(cls):
-        for filename in [cls.fibermapfile,cls.framefile]:
-            if os.path.exists(filename):
-                os.remove(filename)
-        if os.path.exists(cls.testDir):
-            shutil.rmtree(cls.testDir)
-
-    #- Test if QuickLook outputs merged QA file
-    #def test_mergeQA(self):
-        #os.environ['QL_SPEC_REDUX'] = self.testDir
-        #cmd = "{} {}/desi_quicklook -i {} -n {} -c {} -e {} --rawdata_dir {} --specprod_dir {} --mergeQA".format(sys.executable,self.binDir,self.configfile,self.night,self.camera,self.expid,self.testDir,self.testDir)
-        #pyver = format(sys.executable.split('anaconda')[1])
-        #print('NOTE: Test is running on python v'+format(pyver.split('/')[0]))
-
-        #if int(format(pyver.split('/')[0])) < 3:
-            #pass
-        #else:
-            #if runcmd(cmd) != 0:
-                #raise RuntimeError('quicklook pipeline failed')
-
-
-    def test_QA(self):
-        os.environ['QL_SPEC_REDUX'] = self.testDir
-#        cmd = "{} {}/desi_quicklook -i {} -n {} -c {} -e {} --rawdata_dir {} --specprod_dir {} ".format(sys.executable,self.binDir,self.configfile,self.night,self.camera,self.expid,self.testDir,self.testDir)
-
-#        if runcmd(cmd) != 0:
-#            raise RuntimeError('quicklook pipeline failed')
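The amp-geometry keywords in raw_hdr above use the IRAF section convention: 1-indexed, inclusive, written [x1:x2,y1:y2]. desispec decodes them into numpy (y, x) slices with parse_sec_keyword, which lives in desispec.preproc and is not touched by this patch; a quick sketch against the DATASECA value above:

    from desispec.preproc import parse_sec_keyword

    # '[5:2052,1:2048]' -> (yslice, xslice), 0-indexed with exclusive stops
    yslice, xslice = parse_sec_keyword('[5:2052,1:2048]')
    assert (yslice, xslice) == (slice(0, 2048), slice(4, 2052))
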
diff --git a/deprecated/py/desispec/test/test_ql_pa.py b/deprecated/py/desispec/test/test_ql_pa.py
deleted file mode 100644
index 31471d5a5..000000000
--- a/deprecated/py/desispec/test/test_ql_pa.py
+++ /dev/null
@@ -1,150 +0,0 @@
-"""
-tests for Quicklook Pipeline steps in desispec.quicklook.procalgs
-"""
-
-import unittest
-import numpy as np
-import os
-import shutil
-import desispec
-from desispec.quicklook import procalgs as PA
-from importlib import resources
-from desispec.test.test_ql_qa import xy2hdr
-from desispec.preproc import parse_sec_keyword
-import astropy.io.fits as fits
-from desispec.quicklook import qllogger
-
-qlog=qllogger.QLLogger("QuickLook",0)
-log=qlog.getlog()
-
-class TestQL_PA(unittest.TestCase):
-
-    def tearDown(self):
-        self.rawimage.close()
-        for filename in [self.rawfile, self.pixfile]:
-            if os.path.exists(filename):
-                os.remove(filename)
-        if os.path.exists(self.testDir):
-            shutil.rmtree(self.testDir)
-
-    #- Create some test data
-    def setUp(self):
-
-        #- Create temporary calib directory
-        self.testDir = os.path.join(os.environ['HOME'], 'ql_test_io')
-        calibDir = os.path.join(self.testDir, 'ql_calib')
-        if not os.path.exists(calibDir): os.makedirs(calibDir)
-
-        #- Generate calib data
-        for camera in ['b0', 'r0', 'z0']:
-            #- Fiberflat has to exist but can be a dummy file
-            filename = '{}/fiberflat-{}.fits'.format(calibDir, camera)
-            fx = open(filename, 'w'); fx.write('fiberflat file'); fx.close()
-
-            #- PSF has to be real file
-            psffile = '{}/psf-{}.fits'.format(calibDir, camera)
-            example_psf = str(resources.files('desispec').joinpath(f'test/data/ql/psf-{camera}.fits'))
-            shutil.copy(example_psf, psffile)
-
-        #- Copy test calibration-data.yaml file
-        specdir = os.path.join(calibDir, 'spec', 'sp0')
-        if not os.path.isdir(specdir):
-            os.makedirs(specdir)
-        for c in "brz":
-            shutil.copy(str(resources.files('desispec').joinpath(f'test/data/ql/{c}0.yaml')), os.path.join(specdir, f"{c}0.yaml"))
-
-        #- Set calibration environment variable
-        os.environ['DESI_SPECTRO_CALIB'] = calibDir
-
-        self.rawfile = os.path.join(self.testDir,'test-raw-abcd.fits')
-        self.pixfile = os.path.join(self.testDir,'test-pix-abcd.fits')
-        self.config={}
-
-        #- rawimage
-
-        hdr = dict()
-        hdr['CAMERA'] = 'b0'
-        hdr['DATE-OBS'] = '2018-09-23T08:17:03.988'
-
-        #- Dimensions per amp, not full 4-quad CCD
-        ny = self.ny = 500
-        nx = self.nx = 400
-        noverscan = nover = 50
-
-        hdr['BIASSEC1'] = xy2hdr(np.s_[0:ny, nx:nx+nover])
-        hdr['DATASEC1'] = xy2hdr(np.s_[0:ny, 0:nx])
-        hdr['CCDSEC1'] = xy2hdr(np.s_[0:ny, 0:nx])
-
-        hdr['BIASSEC2'] = xy2hdr(np.s_[0:ny, nx+nover:nx+2*nover])
-        hdr['DATASEC2'] = xy2hdr(np.s_[0:ny, nx+2*nover:nx+2*nover+nx])
-        hdr['CCDSEC2'] = xy2hdr(np.s_[0:ny, nx:nx+nx])
-
-        hdr['BIASSEC3'] = xy2hdr(np.s_[ny:ny+ny, nx:nx+nover])
-        hdr['DATASEC3'] = xy2hdr(np.s_[ny:ny+ny, 0:nx])
-        hdr['CCDSEC3'] = xy2hdr(np.s_[ny:ny+ny, 0:nx])
-
-        hdr['BIASSEC4'] = xy2hdr(np.s_[ny:ny+ny, nx+nover:nx+2*nover])
-        hdr['DATASEC4'] = xy2hdr(np.s_[ny:ny+ny, nx+2*nover:nx+2*nover+nx])
-        hdr['CCDSEC4'] = xy2hdr(np.s_[ny:ny+ny, nx:nx+nx])
-
-        hdr['NIGHT'] = '20180923'
-        hdr['EXPID'] = 1
-        hdr['FLAVOR']='dark'
-
-        rawimage = np.zeros((2*ny, 2*nx+2*noverscan))
-        offset = {'1':100.0, '2':100.5, '3':50.3, '4':200.4}
-        gain = {'1':1.0, '2':1.5, '3':0.8, '4':1.2}
-        rdnoise = {'1':2.0, '2':2.2, '3':2.4, '4':2.6}
-
-        quad = {
-            '1': np.s_[0:ny, 0:nx], '2': np.s_[0:ny, nx:nx+nx],
-            '3': np.s_[ny:ny+ny, 0:nx], '4': np.s_[ny:ny+ny, nx:nx+nx],
-        }
-
-        for amp in ('1', '2', '3', '4'):
-
-            hdr['GAIN'+amp] = gain[amp]
-            hdr['RDNOISE'+amp] = rdnoise[amp]
-
-            xy = parse_sec_keyword(hdr['BIASSEC'+amp])
-            shape = [xy[0].stop-xy[0].start, xy[1].stop-xy[1].start]
-            rawimage[xy] += offset[amp]
-            rawimage[xy] += np.random.normal(scale=rdnoise[amp], size=shape)/gain[amp]
-            xy = parse_sec_keyword(hdr['DATASEC'+amp])
-            shape = [xy[0].stop-xy[0].start, xy[1].stop-xy[1].start]
-            rawimage[xy] += offset[amp]
-            rawimage[xy] += np.random.normal(scale=rdnoise[amp], size=shape)/gain[amp]
-        #- raw data are integers, not floats
-        rawimg = rawimage.astype(np.int32)
-        self.expid=hdr["EXPID"]
-        self.camera=hdr["CAMERA"]
-        #- Confirm that all regions were correctly offset
-        assert not np.any(rawimage == 0.0)
-
-        hdr['DOSVER'] = 'SIM'
-        hdr['FEEVER'] = 'SIM'
-        hdr['DETECTOR'] = 'SIM'
-        desispec.io.write_raw(self.rawfile,rawimg,hdr,camera=self.camera)
-        self.rawimage=fits.open(self.rawfile)
-
-
-
-    #- Individual tests already exist in offline tests. So we will mostly test the call etc. here
-#    def testPreproc(self):
-#        pa=PA.Preproc('Preproc',self.config,logger=log)
-#        log.info("Test preproc")
-#        inp=self.rawimage
-#        rawshape=inp[self.camera.upper()].data.shape
-#        bias=np.zeros(rawshape)
-#        pixflat=np.ones(rawshape)
-#        mask = np.random.randint(0, 2, size=(1000,800))
-#        pargs={}
-#        pargs["camera"]=self.camera
-#        pargs["Bias"]=bias
-#        pargs["PixFlat"]=pixflat
-#        pargs["Mask"]=mask
-#        pargs["DumpIntermediates"]=True
-#        pargs["dumpfile"]=self.pixfile
-#        img=pa(inp,**pargs)
-#        self.assertTrue(np.all(img.mask == mask))
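test_ql_pa.py builds its amp geometry with xy2hdr, imported from the test_ql_qa.py module whose deletion follows; that helper and parse_sec_keyword are inverses of each other. A round-trip sketch, valid on a checkout where the deleted module is still importable:

    import numpy as np
    from desispec.preproc import parse_sec_keyword
    from desispec.test.test_ql_qa import xy2hdr  # deprecated location, as imported above

    sec = xy2hdr(np.s_[0:500, 400:450])   # numpy (y, x) slices -> '[401:450,1:500]'
    assert parse_sec_keyword(sec) == (slice(0, 500), slice(400, 450))
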
diff --git a/deprecated/py/desispec/test/test_ql_qa.py b/deprecated/py/desispec/test/test_ql_qa.py
deleted file mode 100644
index 64d11e6fd..000000000
--- a/deprecated/py/desispec/test/test_ql_qa.py
+++ /dev/null
@@ -1,537 +0,0 @@
-"""
-tests for Quicklook QA class and functions. It also includes tests on low level functions in desispec.qa.qalib
-"""
-
-import unittest
-import shutil
-import tempfile
-import numpy as np
-import os
-from desispec.qa import qalib
-from desispec.qa import qa_quicklook as QA
-from importlib import resources
-import desispec.sky
-from desispec.preproc import parse_sec_keyword
-from specter.psf import load_psf
-import astropy.io.fits as fits
-from desispec.quicklook import qllogger
-import desispec.io
-import desispec.image
-from desitarget.targetmask import desi_mask
-
-qlog=qllogger.QLLogger("QuickLook",0)
-log=qlog.getlog()
-
-def xy2hdr(xyslice):
-    '''
-    convert 2D slice into IRAF style [a:b,c:d] hdr value
-
-    e.g. xy2hdr(np.s_[0:10, 5:20]) -> '[6:20,1:10]'
-    '''
-    yy, xx = xyslice
-    value = '[{}:{},{}:{}]'.format(xx.start+1, xx.stop, yy.start+1, yy.stop)
-    return value
-
-#- 2D gaussian function to model sky peaks
-def gaussian2D(x,y,amp,xmu,ymu,xsigma,ysigma):
-    x,y = np.meshgrid(x,y)
-    gauss = amp*np.exp(-(x-xmu)**2/(2*xsigma**2)-(y-ymu)**2/(2*ysigma**2))
-    return gauss
-
-class TestQL_QA(unittest.TestCase):
-
-    @classmethod
-    def setUpClass(cls):
-        """Create test filenames in a unique temporary directory
-        """
-        cls.testDir = tempfile.mkdtemp()
-        cls.rawfile = os.path.join(cls.testDir, 'test-raw-abcde.fits')
-        cls.pixfile = os.path.join(cls.testDir, 'test-pix-abcde.fits')
-        cls.xwfile = os.path.join(cls.testDir, 'test-xw-abcde.fits')
-        cls.framefile = os.path.join(cls.testDir, 'test-frame-abcde.fits')
-        cls.fibermapfile = os.path.join(cls.testDir, 'test-fibermap-abcde.fits')
-        cls.skyfile = os.path.join(cls.testDir, 'test-sky-abcde.fits')
-        cls.qafile = os.path.join(cls.testDir, 'test_qa.yaml')
-        cls.qajson = os.path.join(cls.testDir, 'test_qa.json')
-        cls.qafig = os.path.join(cls.testDir, 'test_qa.png')
-
-    @classmethod
-    def tearDownClass(cls):
-        """Cleanup temporary directory
-        """
-        shutil.rmtree(cls.testDir)
-
-    def tearDown(self):
-        self.rawimage.close()
-        for filename in [self.framefile, self.rawfile, self.pixfile, self.xwfile, self.fibermapfile, self.skyfile, self.qafile, self.qajson, self.qafig]:
-            if os.path.exists(filename):
-                os.remove(filename)
-
-    #- Create some test data
-    def setUp(self):
-        #- use specter psf for this test
-        self.psffile = resources.files('specter').joinpath('test/t/psf-monospot.fits')
-        #self.psffile=os.environ['DESIMODEL']+'/data/specpsf/psf-b.fits'
-        self.config={"kwargs":{
-            "refKey":None,
-            "param":{},
-            "qso_resid":None
-        }}
-
-        #- rawimage
-
-        hdr = dict()
-        hdr['CAMERA'] = 'z1'
-        hdr['DATE-OBS'] = '2018-09-23T08:17:03.988'
-        hdr['PROGRAM'] = 'dark'
-        hdr['EXPTIME'] = 100
-
-        #- Dimensions per amp
-        ny = self.ny = 500
-        nx = self.nx = 400
-        noverscan = nover = 50
-
-        hdr['BIASSECA'] = xy2hdr(np.s_[0:ny, nx:nx+nover])
-        hdr['DATASECA'] = xy2hdr(np.s_[0:ny, 0:nx])
-        hdr['CCDSECA'] = xy2hdr(np.s_[0:ny, 0:nx])
-
-        hdr['BIASSECB'] = xy2hdr(np.s_[0:ny, nx+nover:nx+2*nover])
-        hdr['DATASECB'] = xy2hdr(np.s_[0:ny, nx+2*nover:nx+2*nover+nx])
-        hdr['CCDSECB'] = xy2hdr(np.s_[0:ny, nx:nx+nx])
-
-        hdr['BIASSECC'] = xy2hdr(np.s_[ny:ny+ny, nx:nx+nover])
-        hdr['DATASECC'] = xy2hdr(np.s_[ny:ny+ny, 0:nx])
-        hdr['CCDSECC'] = xy2hdr(np.s_[ny:ny+ny, 0:nx])
-
-        hdr['BIASSECD'] = xy2hdr(np.s_[ny:ny+ny, nx+nover:nx+2*nover])
-        hdr['DATASECD'] = xy2hdr(np.s_[ny:ny+ny, nx+2*nover:nx+2*nover+nx])
-        hdr['CCDSECD'] = xy2hdr(np.s_[ny:ny+ny, nx:nx+nx])
-
-        hdr['NIGHT'] = '20180923'
-        hdr['EXPID'] = 1
-        hdr['PROGRAM'] = 'dark'
-        hdr['FLAVOR'] = 'science'
-        hdr['EXPTIME'] = 100.0
-
-        rawimage = np.zeros((2*ny, 2*nx+2*noverscan))
-        offset = {'A':100.0, 'B':100.5, 'C':50.3, 'D':200.4}
-        gain = {'A':1.0, 'B':1.5, 'C':0.8, 'D':1.2}
-        rdnoise = {'A':2.0, 'B':2.2, 'C':2.4, 'D':2.6}
-        obsrdn = {'A':3.4, 'B':3.3, 'C':3.6, 'D':3.3}
-
-        quad = {
-            'A': np.s_[0:ny, 0:nx], 'B': np.s_[0:ny, nx:nx+nx],
-            'C': np.s_[ny:ny+ny, 0:nx], 'D': np.s_[ny:ny+ny, nx:nx+nx],
-        }
-
-        for amp in ('A', 'B', 'C', 'D'):
-
-            hdr['GAIN'+amp] = gain[amp]
-            hdr['RDNOISE'+amp] = rdnoise[amp]
-            hdr['OBSRDN'+amp] = obsrdn[amp]
-
-            xy = parse_sec_keyword(hdr['BIASSEC'+amp])
-            shape = [xy[0].stop-xy[0].start, xy[1].stop-xy[1].start]
-            rawimage[xy] += offset[amp]
-            rawimage[xy] += np.random.normal(scale=rdnoise[amp], size=shape)/gain[amp]
-            xy = parse_sec_keyword(hdr['DATASEC'+amp])
-            shape = [xy[0].stop-xy[0].start, xy[1].stop-xy[1].start]
-            rawimage[xy] += offset[amp]
-            rawimage[xy] += np.random.normal(scale=rdnoise[amp], size=shape)/gain[amp]
-
-        #- set CCD parameters
-        self.ccdsec1=hdr["CCDSECA"]
-        self.ccdsec2=hdr["CCDSECB"]
-        self.ccdsec3=hdr["CCDSECC"]
-        self.ccdsec4=hdr["CCDSECD"]
-
-        #- raw data are integers, not floats
-        rawimg = rawimage.astype(np.int32)
-        self.expid=hdr["EXPID"]
-        self.camera=hdr["CAMERA"]
-        #- Confirm that all regions were correctly offset
-        assert not np.any(rawimage == 0.0)
-
-        #- write to the rawfile and read it in QA test
-        hdr['DOSVER'] = 'SIM'
-        hdr['FEEVER'] = 'SIM'
-        hdr['DETECTOR'] = 'SIM'
-
-        desispec.io.write_raw(self.rawfile,rawimg,hdr,camera=self.camera)
-        self.rawimage=fits.open(self.rawfile)
-
-        #- read psf; should use specter.PSF.load_psf instead of desispec.PSF(), otherwise need to create a psfboot somewhere
-
-        self.psf = load_psf(self.psffile)
-
-        #- make the test pixfile, fibermap file
-        img_pix = rawimg
-        img_ivar = np.ones_like(img_pix) / 3.0**2
-        img_mask = np.zeros(img_pix.shape, dtype=np.uint32)
-        img_mask[200] = 1
-
-        self.image = desispec.image.Image(img_pix, img_ivar, img_mask, camera='z1',meta=hdr)
-        desispec.io.write_image(self.pixfile, self.image)
-
-        #- Create a fibermap with purposefully overlapping targeting bits
-        n = 30
-        self.fibermap = desispec.io.empty_fibermap(n)
-        self.fibermap['OBJTYPE'][:] = 'TGT'
-        self.fibermap['DESI_TARGET'][::2] |= desi_mask.ELG
-        self.fibermap['DESI_TARGET'][::5] |= desi_mask.QSO
-        self.fibermap['DESI_TARGET'][::7] |= desi_mask.LRG
-
-        #- add some arbitrary fluxes
-        for key in ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']:
-            self.fibermap[key] = 10**((22.5 - np.random.uniform(18, 21, size=n))/2.5)
-
-        #- Make some standards; these still have OBJTYPE = 'TGT'
-        ii = [6,18,29]
-        self.fibermap['DESI_TARGET'][ii] = desi_mask.STD_FAINT
-
-        #- set some targets to SKY
-        ii = self.skyfibers = [5,10,21]
-        self.fibermap['OBJTYPE'][ii] = 'SKY'
-        self.fibermap['DESI_TARGET'][ii] = desi_mask.SKY
-        for key in ['FLUX_G', 'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2']:
-            self.fibermap[key][ii] = np.random.normal(scale=100, size=len(ii))
-
-        desispec.io.write_fibermap(self.fibermapfile, self.fibermap)
-
-        #- make a test frame file
-        self.night=hdr['NIGHT']
-        self.nspec = nspec = 30
-        wave=np.arange(7600.0,9800.0,1.0) #- z channel
-        nwave = self.nwave = len(wave)
-        flux=np.random.uniform(size=(nspec,nwave))+100.
-        ivar=np.ones_like(flux)
-        resolution_data=np.ones((nspec,13,nwave))
-        self.frame=desispec.frame.Frame(wave,flux,ivar,resolution_data=resolution_data,fibermap=self.fibermap)
-        self.frame.meta = hdr
-        self.frame.meta['WAVESTEP']=0.5
-        desispec.io.write_frame(self.framefile, self.frame)
-
-        #- make a skymodel
-        sky=np.ones_like(self.frame.flux)*0.5
-        skyivar=np.ones_like(sky)
-        self.mask=np.zeros(sky.shape,dtype=np.uint32)
-        self.skymodel=desispec.sky.SkyModel(wave,sky,skyivar,self.mask)
-        self.skyfile=desispec.io.write_sky(self.skyfile,self.skymodel)
-
-        #- Make a dummy boundary map for wavelength-flux in pixel space
-        self.map2pix={}
-        self.map2pix["LEFT_MAX_FIBER"] = 14
-        self.map2pix["RIGHT_MIN_FIBER"] = 17
-        self.map2pix["BOTTOM_MAX_WAVE_INDEX"] = 900
-        self.map2pix["TOP_MIN_WAVE_INDEX"] = 1100
-
-    #- test some qa utility functions:
-    def test_ampregion(self):
-        pixboundary=qalib.ampregion(self.image)
-        self.assertEqual(pixboundary[0][1],slice(0,self.nx,None))
-        self.assertEqual(pixboundary[3][0],slice(self.ny,self.ny+self.ny,None))
-
-    def test_fiducialregion(self):
-        leftmax,rightmin,bottommax,topmin=qalib.fiducialregion(self.frame,self.psf)
-        self.assertEqual(leftmax,self.nspec-1) #- as only 30 spectra defined
-        self.assertLess(bottommax,topmin)
-
-    def test_getrms(self):
-        img_rms=qalib.getrms(self.image.pix)
-        self.assertEqual(img_rms,np.std(self.image.pix))
-
-    def test_countpix(self):
-        pix=self.image.pix
-        counts1=qalib.countpix(pix,nsig=3) #- counts above 3 sigma
-        counts2=qalib.countpix(pix,nsig=4) #- counts above 4 sigma
-        self.assertLess(counts2,counts1)
-
-# RS: remove this test because this QA isn't used
-#    def test_sky_resid(self):
-#        import copy
-#        param = dict(
-#            PCHI_RESID=0.05,PER_RESID=95.,BIN_SZ=0.1)
-#        qadict=qalib.sky_resid(param,self.frame,self.skymodel,quick_look=True)
-#        kk=np.where(self.frame.fibermap['OBJTYPE']=='SKY')[0]
-#        self.assertEqual(qadict['NSKY_FIB'],len(kk))
-#
-#        #- run with different sky flux
-#        skym1=desispec.sky.SkyModel(self.frame.wave,self.skymodel.flux,self.skymodel.ivar,self.mask)
-#        skym2=desispec.sky.SkyModel(self.frame.wave,self.skymodel.flux*0.5,self.skymodel.ivar,self.mask)
-#        frame1=copy.deepcopy(self.frame)
-#        frame2=copy.deepcopy(self.frame)
-#        desispec.sky.subtract_sky(frame1,skym1)
-#        desispec.sky.subtract_sky(frame2,skym2)
-#
-#        qa1=qalib.sky_resid(param,frame1,skym1)
-#        qa2=qalib.sky_resid(param,frame2,skym2)
-#        self.assertLess(qa1['RESID'],qa2['RESID']) #- residuals must be smaller for case 1
-
-    def testSignalVsNoise(self):
-        import copy
-        params=None
-        #- first get the sky subtracted frame
-        #- copy frame not to override
-        thisframe=copy.deepcopy(self.frame)
-        desispec.sky.subtract_sky(thisframe,self.skymodel)
-        qadict=qalib.SignalVsNoise(thisframe,params)
-        #- make sure all the S/N is positive
-        self.assertTrue(np.all(qadict['MEDIAN_SNR'] > 0))
-
-        #- Reduce sky
-        skym1=desispec.sky.SkyModel(self.frame.wave,self.skymodel.flux,self.skymodel.ivar,self.mask)
-        skym2=desispec.sky.SkyModel(self.frame.wave,self.skymodel.flux*0.5,self.skymodel.ivar,self.mask)
-        frame1=copy.deepcopy(self.frame)
-        frame2=copy.deepcopy(self.frame)
-        desispec.sky.subtract_sky(frame1,skym1)
-        desispec.sky.subtract_sky(frame2,skym2)
-        qa1=qalib.SignalVsNoise(frame1,params)
-        qa2=qalib.SignalVsNoise(frame2,params)
-        self.assertTrue(np.all(qa2['MEDIAN_SNR'] > qa1['MEDIAN_SNR']))
-
-        #- test for tracer not present
-        nullfibermap=desispec.io.empty_fibermap(10)
-        qa=qalib.SignalVsNoise(self.frame,params)
-
-        self.assertEqual(len(qa['MEDIAN_SNR']),30)
-
-    #- Test each individual QA:
-    def testBiasOverscan(self):
-        return
-        qa=QA.Bias_From_Overscan('bias',self.config) #- initialize with fake config and name
-        inp=self.rawimage
-        qargs={}
-        qargs["RESULTKEY"] = 'BIAS_AMP'
-        qargs["PSFFile"]=self.psf
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["amps"]=True
-        qargs["qafile"]=self.qafile
-        qargs["qafig"]=self.qafig
-        qargs["paname"]="abc"
-        qargs["singleqa"]=None
-        res1=qa(inp,**qargs)
-        self.assertEqual(len(res1['METRICS']['BIAS_AMP']),4)
-
-    def testGetRMS(self):
-        return
-        qa=QA.Get_RMS('rms',self.config)
-        inp=self.image
-        qargs={}
-        qargs["RESULTKEY"] = 'NOISE_AMP'
-        qargs["PSFFile"]=self.psf
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["amps"]=True
-        qargs["paname"]="abc"
-        qargs["qafile"]=self.qafile
-        qargs["qafig"]=self.qafig
-        qargs["singleqa"]=None
-        qargs["param"]={'PERCENTILES': [68.2,95.4,99.7], 'NOISE_AMP_NORMAL_RANGE': [-1.0, 1.0], 'NOISE_AMP_WARN_RANGE': [-2.0, 2.0]}
-        resl=qa(inp,**qargs)
-        self.assertTrue("yaml" in qargs["qafile"])
-        self.assertTrue("png" in qargs["qafig"])
-        self.assertTrue(len(resl['METRICS']['NOISE_AMP'])==4)
-        self.assertTrue(np.all(resl['METRICS']['NOISE_AMP'] > 0))
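Every QA test from testBiasOverscan onward is disabled by a bare return as its first statement, which makes the test quietly pass. If any of these are ever resurrected, the usual unittest spelling reports the skip explicitly in the test output; a sketch (the reason string is invented here):

    import unittest

    @unittest.skip('quicklook QA disabled; replaces the bare-return pattern above')
    def testGetRMS(self):
        qa = QA.Get_RMS('rms', self.config)
        ...
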
-
-    def testCalcXWSigma(self):
-        return
-        #- Create another pix file for xwsigma test
-        xw_hdr = dict()
-        xw_hdr['CAMERA'] = self.camera
-        xw_hdr['NIGHT'] = self.night
-        xw_hdr['EXPID'] = self.expid
-        xw_hdr['PROGRAM'] = 'dark'
-        xw_hdr['FLAVOR'] = 'science'
-
-        xw_ny = 2000
-        xw_nx = 2000
-        xw_rawimage = np.zeros((2*xw_ny,2*xw_nx))
-        xw_img_pix = xw_rawimage.astype(np.int32)
-        xw_img_ivar = np.ones_like(xw_img_pix)/3.0**2
-        xw_img_mask = np.zeros(xw_img_pix.shape,dtype=np.uint32)
-
-        #- manually insert gaussian sky peaks
-        x = np.arange(7)
-        y = np.arange(7)
-        a = 10000.
-        xmu = np.mean(x)
-        ymu = np.mean(y)
-        xsigma = 1.0
-        ysigma = 1.0
-        peak_counts = np.rint(gaussian2D(x,y,a,xmu,ymu,xsigma,ysigma))
-        peak_counts = peak_counts.astype(np.int32)
-        zpeaks = np.array([8401.5,8432.4,8467.5,9479.4])
-        fibers = np.arange(30)
-        for i in range(len(zpeaks)):
-            pix = np.rint(self.psf.xy(fibers,zpeaks[i]))
-            for j in range(len(fibers)):
-                for k in range(len(peak_counts)):
-                    ypix = int(pix[0][j]-3+k)
-                    xpix_start = int(pix[1][j]-3)
-                    xpix_stop = int(pix[1][j]+4)
-                    xw_img_pix[ypix][xpix_start:xpix_stop] = peak_counts[k]
-
-        #- transpose pixel values to correct place in image
-        xw_img_pix=np.ndarray.transpose(xw_img_pix)
-
-        #- write the test pixfile, fibermap file
-        xwimage = desispec.image.Image(xw_img_pix, xw_img_ivar, xw_img_mask, camera='z1',meta=xw_hdr)
-        desispec.io.write_image(self.xwfile, xwimage)
-
-        qa=QA.Calc_XWSigma('xwsigma',self.config)
-        inp=xwimage
-        qargs={}
-        qargs["RESULTKEY"] = 'XWSIGMA'
-        qargs["Flavor"]='science'
-        qargs["PSFFile"]=self.psffile
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["amps"]=False
-        qargs["paname"]="abc"
-        qargs["qafile"]=self.qafile
-        qargs["qafig"]=self.qafig
-        qargs["singleqa"]=None
-
-        qargs["param"]={'B_PEAKS': [3914.4, 5199.3, 5578.9],'R_PEAKS': [6301.9, 6365.4, 7318.2, 7342.8, 7371.3],'Z_PEAKS': [8401.5, 8432.4, 8467.5, 9479.4],'PIXEL_RANGE': 7,'XWSIGMA_NORMAL_RANGE': [-2.0, 2.0],'XWSIGMA_WARN_RANGE': [-4.0, 4.0]}
-        resl=qa(inp,**qargs)
-        self.assertTrue(len(resl["METRICS"]["XWSIGMA"].ravel())==2)
-        self.assertTrue("yaml" in qargs["qafile"])
-        self.assertTrue("png" in qargs["qafig"])
-        self.assertTrue(len(resl['METRICS']['XWSIGMA'])==4)
-        self.assertTrue(np.all(resl['METRICS']['XWSIGMA'] > 0))
-
-    def testCountPixels(self):
-        return
-        qa=QA.Count_Pixels('countpix',self.config)
-        inp=self.image
-        qargs={}
-        qargs["RESULTKEY"] = 'LITFRAC_AMP'
-        qargs["PSFFile"]=self.psf
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["amps"]=False
-        qargs["paname"]="abc"
-        qargs["singleqa"]=None
-        qargs["param"]={'CUTPIX': 5, 'LITFRAC_NORMAL_RANGE': [-0.1, 0.1], 'LITFRAC_WARN_RANGE': [-0.2, 0.2]}
-        resl=qa(inp,**qargs)
-        #- test if amp QAs exist
-        qargs["amps"] = True
-        resl2=qa(inp,**qargs)
-        self.assertTrue(len(resl2['METRICS']['LITFRAC_AMP'])==4)
-
-    def testCountSpectralBins(self):
-        return
-        qa=QA.CountSpectralBins('countbins',self.config)
-        inp=self.frame
-        qargs={}
-        qargs["RESULTKEY"] = 'NGOODFIB'
-        qargs["PSFFile"]=self.psf
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["amps"]=True
-        qargs["paname"]="abc"
-        qargs["qafile"]=self.qafile
-        qargs["qafig"]=None
-        qargs["singleqa"]=None
-        qargs["param"]={'CUTBINS': 5, 'N_KNOWN_BROKEN_FIBERS': 0, 'NGOODFIB_NORMAL_RANGE': [-5, 5], 'NGOODFIB_WARN_RANGE': [-10, 10]}
-        resl=qa(inp,**qargs)
-        self.assertTrue(resl["METRICS"]["GOOD_FIBERS"].shape[0]==inp.nspec)
-        self.assertTrue((resl["METRICS"]["NGOODFIB"])<=inp.nspec)
-
-    def testSkyCont(self):
-        return
-        qa=QA.Sky_Continuum('skycont',self.config)
-        inp=self.frame
-        qargs={}
-        qargs["RESULTKEY"] = 'SKYCONT'
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["paname"]="abc"
-        qargs["singleqa"]=None
-        qargs["param"]={'B_CONT': ["4000, 4500", "5250, 5550"],'R_CONT': ["5950, 6200", "6990, 7230"],'Z_CONT': ["8120, 8270", "9110, 9280"]}
-        resl=qa(inp,**qargs)
-        self.assertTrue(resl["METRICS"]["SKYFIBERID"]==self.skyfibers) #- as defined in the fibermap
-        self.assertTrue(resl["METRICS"]["SKYCONT"]>0)
-
-    def testSkyPeaks(self):
-        return
-        qa=QA.Sky_Peaks('skypeaks',self.config)
-        inp=self.frame
-        qargs={}
-        qargs["RESULTKEY"] = 'PEAKCOUNT'
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["paname"]="abc"
-        qargs["dict_countbins"]=self.map2pix
-        qargs["singleqa"]=None
-        qargs["param"]={'B_PEAKS': [3914.4, 5199.3, 5201.8],'R_PEAKS': [6301.9, 6365.4, 7318.2, 7342.8, 7371.3],'Z_PEAKS': [8401.5, 8432.4, 8467.5, 9479.4, 9505.6, 9521.8],'PEAKCOUNT_NORMAL_RANGE': [-1.0, 1.0],'PEAKCOUNT_WARN_RANGE': [-2.0, 2.0]}
-        resl=qa(inp,**qargs)
-
-        #self.assertTrue(np.all(resl['METRICS']['PEAKCOUNT_RMS_AMP'])>=0.)
-        self.assertTrue(resl['METRICS']['PEAKCOUNT_NOISE']>0)
-
-    def testIntegrateSpec(self):
-        return
-        qa=QA.Integrate_Spec('integ',self.config)
-        inp=self.frame
-        qargs={}
-        qargs["RESULTKEY"] = 'DELTAMAG_TGT'
-        qargs["PSFFile"]=self.psf
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["paname"]="abc"
-        qargs["dict_countbins"]=self.map2pix
-        qargs["singleqa"]=None
-        qargs["param"]={'DELTAMAG_TGT_NORMAL_RANGE': [-2., 2.0], 'DELTAMAG_TGT_WARN_RANGE': [-4., 4.]}
-        resl=qa(inp,**qargs)
-        self.assertTrue(len(resl["METRICS"]["STD_FIBERID"])>0)
-
-# RS: We are not using this QA anymore, so we don't need this test
-#    def testSkyResidual(self):
-#        qa=QA.Sky_Residual('skyresid',self.config)
-#        inp=self.frame
-#        sky=self.skymodel
-#        qargs={}
-#        qargs["PSFFile"]=self.psf
-#        qargs["FiberMap"]=self.fibermap
-#        qargs["camera"]=self.camera
-#        qargs["expid"]=self.expid
-#        qargs["paname"]="abc"
-#        qargs["dict_countbins"]=self.map2pix
-#        qargs["singleqa"]=None
-#        qargs["param"]={"BIN_SZ":0.2, "PCHI_RESID":0.05, "PER_RESID":95., "SKYRESID_NORMAL_RANGE":[-5.0, 5.0], "SKYRESID_WARN_RANGE":[-10.0, 10.0]}
-#
-#        resl=qa(inp,sky,**qargs)
-#
-#        #self.assertTrue(resl["METRICS"]["NREJ"]==self.skymodel.nrej)
-#        #self.assertTrue(len(resl["METRICS"]["MED_RESID_WAVE"]) == self.nwave)
-#        #self.assertTrue(len(resl["METRICS"]["MED_RESID_FIBER"]) == 5) #- 5 sky fibers in the input
-#        #self.assertTrue(resl["PARAMS"]["BIN_SZ"] == 0.1)
-#        ##- test with different parameter set:
-#        #resl2=qa(inp,sky,**qargs)
-#        #self.assertTrue(len(resl["METRICS"]["DEVS_1D"])>len(resl2["METRICS"]["DEVS_1D"])) #- larger histogram bin size than default 0.1
-
-    def testCalculateSNR(self):
-        return
-        qa=QA.Calculate_SNR('snr',self.config)
-        inp=self.frame
-        qargs={}
-        qargs["RESULTKEY"] = 'FIDSNR'
-        qargs["PSFFile"]=self.psf
-        qargs["FiberMap"]=self.fibermap
-        qargs["camera"]=self.camera
-        qargs["expid"]=self.expid
-        qargs["paname"]="abc"
-        qargs["qafile"]=self.qafile #- no LRG by construction.
-        qargs["dict_countbins"]=self.map2pix
-        qargs["singleqa"]=None
-        qargs["param"]={'RESIDUAL_CUT': 0.2, 'SIGMA_CUT': 2.0, 'FIDSNR_TGT_NORMAL_RANGE': [-11., 11.], 'FIDSNR_TGT_WARN_RANGE': [-12., 12.], 'FIDMAG': 22.}
-        resl=qa(inp,**qargs)
-        self.assertTrue("yaml" in qargs["qafile"])
-        self.assertTrue(len(resl["METRICS"]["MEDIAN_SNR"])==self.nspec) #- positive definite
- qargs["dict_countbins"]=self.map2pix - qargs["singleqa"]=None - qargs["param"]={'RESIDUAL_CUT': 0.2, 'SIGMA_CUT': 2.0, 'FIDSNR_TGT_NORMAL_RANGE': [-11., 11.], 'FIDSNR_TGT_WARN_RANGE': [-12., 12.], 'FIDMAG': 22.} - resl=qa(inp,**qargs) - self.assertTrue("yaml" in qargs["qafile"]) - self.assertTrue(len(resl["METRICS"]["MEDIAN_SNR"])==self.nspec) #- positive definite diff --git a/deprecated/py/desispec/test/test_qlextract.py b/deprecated/py/desispec/test/test_qlextract.py deleted file mode 100644 index edfe77ff4..000000000 --- a/deprecated/py/desispec/test/test_qlextract.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import absolute_import, division, print_function - -try: - from specter.psf import load_psf - nospecter = False -except ImportError: - from desiutil.log import get_logger - log = get_logger() - log.error('specter not installed; skipping extraction tests') - nospecter = True - -import unittest -import uuid -import os -import tempfile -import shutil -from glob import glob -from importlib import resources - -import desispec.image -import desispec.io -import desispec.scripts.extract - -from astropy.io import fits -import numpy as np - -class TestExtract(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.origdir = os.getcwd() - cls.testdir = tempfile.mkdtemp() - os.chdir(cls.testdir) - cls.testhash = uuid.uuid4() - cls.imgfile = 'test-img-{}.fits'.format(cls.testhash) - cls.outfile = 'test-out-{}.fits'.format(cls.testhash) - cls.outmodel = 'test-model-{}.fits'.format(cls.testhash) - cls.fibermapfile = 'test-fibermap-{}.fits'.format(cls.testhash) - cls.psffile = resources.files('specter').joinpath('test/t/psf-monospot.fits') - # cls.psf = load_psf(cls.psffile) - - pix = np.random.normal(0, 3.0, size=(400,400)) - ivar = np.ones_like(pix) / 3.0**2 - mask = np.zeros(pix.shape, dtype=np.uint32) - mask[200] = 1 - img = desispec.image.Image(pix, ivar, mask, camera='z0') - desispec.io.write_image(cls.imgfile, img, meta=dict(flavor='science')) - - fibermap = desispec.io.empty_fibermap(100) - desispec.io.write_fibermap(cls.fibermapfile, fibermap) - - def setUp(self): - os.chdir(self.testdir) - for filename in (self.outfile, self.outmodel): - if os.path.exists(filename): - os.remove(filename) - - @classmethod - def tearDownClass(cls): - #- Remove testdir only if it was created by tempfile.mkdtemp - if cls.testdir.startswith(tempfile.gettempdir()) and os.path.exists(cls.testdir): - shutil.rmtree(cls.testdir) - - os.chdir(cls.origdir) - - def test_boxcar(self): - from desispec.quicklook.qlboxcar import do_boxcar - from desispec.io import read_xytraceset - - #psf = load_psf(self.psffile) - tset = read_xytraceset(self.psffile) - pix = np.random.normal(0, 3.0, size=(tset.npix_y, tset.npix_y)) - ivar = np.ones_like(pix) / 3.0**2 - mask = np.zeros(pix.shape, dtype=np.uint32) - img = desispec.image.Image(pix, ivar, mask, camera='z0') - - outwave = np.arange(7500, 7600) - nwave = len(outwave) - nspec = 5 - flux, ivar, resolution = do_boxcar(img, tset, outwave, boxwidth=2.5, nspec=nspec) - - self.assertEqual(flux.shape, (nspec, nwave)) - self.assertEqual(ivar.shape, (nspec, nwave)) - self.assertEqual(resolution.shape[0], nspec) - # resolution.shape[1] is number of diagonals; picked by algorithm - self.assertEqual(resolution.shape[2], nwave)