Implement DM-43176 to skip zero flux sources #96

Closed
wants to merge 2 commits into from

36 changes: 30 additions & 6 deletions python/lsst/meas/extensions/scarlet/io.py
@@ -10,6 +10,8 @@

 from lsst.geom import Box2I, Extent2I, Point2I, Point2D
 from lsst.afw.image import computePsfImage
+from lsst.afw.detection import Footprint as afwFootprint, HeavyFootprintF, PeakCatalog
+from lsst.afw.geom import SpanSet, Span
 from lsst.afw.detection import Footprint

 from .source import liteModelToHeavy
@@ -566,15 +568,37 @@ def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, maskImage, red
         peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
         source.detectedPeak = peaks[peakIdx]
         # Set the Footprint
-        heavy = liteModelToHeavy(
-            source=source,
-            blend=blend,
-            xy0=xy0,
-            useFlux=useFlux,
-        )
+        zeroFlux = False
+        try:
+            heavy = liteModelToHeavy(
+                source=source,
+                blend=blend,
+                xy0=xy0,
+                useFlux=useFlux,
+            )
+        except Exception as e:
+            logger.warn(e)
+            zeroFlux = True
+            # Add the location of the source to the peak catalog
+            peakCat = PeakCatalog(source.detectedPeak.table)
+            peakCat.append(source.detectedPeak)
+            # Create a Footprint with a single pixel, set to zero,
+            # to avoid breakage in measurement algorithms.
+            center = Point2I(peakCat[0]["i_x"] - xy0[0], peakCat[0]["i_y"] - xy0[1])
+            spanList = [Span(center.y + xy0[1], center.x + xy0[0], center.x + xy0[0])]
+            footprint = afwFootprint(SpanSet(spanList))
+            footprint.setPeakCatalog(peakCat)
+            heavy = HeavyFootprintF(footprint)
+            heavy.getImageArray()[0] = 0.0
         sourceRecord.setFootprint(heavy)

         if updateFluxColumns:
+            if zeroFlux or heavy.getArea() == 0:
+                # The source has no flux after being weighted with the PSF
+                # in this particular band (it might have flux in others).
+                sourceRecord.set("deblend_zeroFlux", True)
+            else:
+                sourceRecord.set("deblend_zeroFlux", False)
             # Set the fraction of pixels with valid data.
             coverage = calculateFootprintCoverage(heavy, maskImage)
             sourceRecord.set("deblend_dataCoverage", coverage)
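
The fallback above is the heart of the change: when liteModelToHeavy raises, typically because the PSF-weighted model has no flux in this band, the code substitutes a single-pixel, zero-valued HeavyFootprint at the peak position so that downstream measurement algorithms still receive a valid footprint. Below is a minimal standalone sketch of that fallback, assuming only the lsst.afw classes imported in the diff; the helper name makeZeroFluxFootprint and its packaging as a free function are illustrative and not part of the PR.

from lsst.afw.detection import Footprint as afwFootprint, HeavyFootprintF, PeakCatalog
from lsst.afw.geom import Span, SpanSet


def makeZeroFluxFootprint(detectedPeak):
    # Keep the original peak record so measurement code can still find
    # the source position.
    peakCat = PeakCatalog(detectedPeak.table)
    peakCat.append(detectedPeak)
    x = peakCat[0]["i_x"]
    y = peakCat[0]["i_y"]
    # A SpanSet covering exactly one pixel: row y, columns x..x.
    footprint = afwFootprint(SpanSet([Span(y, x, x)]))
    footprint.setPeakCatalog(peakCat)
    # Promote to a HeavyFootprint and zero its single pixel value.
    heavy = HeavyFootprintF(footprint)
    heavy.getImageArray()[0] = 0.0
    return heavy

In the PR itself the same steps run inside the except block, and the zeroFlux local records the failure so the deblend_zeroFlux flag can be set when updateFluxColumns is enabled.
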
2 changes: 2 additions & 0 deletions python/lsst/meas/extensions/scarlet/scarletDeblendTask.py
@@ -908,6 +908,8 @@ def _addSchemaKeys(self, schema):
         self.coverageKey = schema.addField('deblend_dataCoverage', type=np.float32,
                                            doc='Fraction of pixels with data. '
                                                'In other words, 1 - fraction of pixels with NO_DATA set.')
+        self.zeroFluxKey = schema.addField("deblend_zeroFlux", type="Flag",
+                                           doc="Source has zero flux.")
         # Blendedness/classification metrics
         self.maxOverlapKey = schema.addField("deblend_maxOverlap", type=np.float32,
                                              doc="Maximum overlap with all of the other neighbors flux "
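
Because deblend_zeroFlux is an ordinary Flag column, consumers of the deblender output can filter on it directly. The snippet below is a hedged usage sketch rather than part of the PR; it assumes catalog is a contiguous lsst.afw.table.SourceCatalog produced by ScarletDeblendTask, so that column access returns array-like values.

import numpy as np

# Sketch only: identify and drop zero-flux sources from a deblended catalog.
zeroFlux = np.asarray(catalog["deblend_zeroFlux"], dtype=bool)
print(f"{zeroFlux.sum()} of {len(catalog)} sources had zero flux in this band")
withFlux = catalog[~zeroFlux].copy(deep=True)  # contiguous subset that kept flux

Flagging rather than dropping these records keeps the parent/child bookkeeping in the catalog intact, which appears to be why the PR attaches a placeholder footprint instead of skipping the source entirely.
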
59 changes: 58 additions & 1 deletion tests/test_deblend.py
@@ -31,7 +31,7 @@
 from lsst.meas.algorithms import SourceDetectionTask
 from lsst.meas.extensions.scarlet.scarletDeblendTask import ScarletDeblendTask
 from lsst.meas.extensions.scarlet.source import bboxToScarletBox, scarletBoxToBBox
-from lsst.meas.extensions.scarlet.io import dataToScarlet, DummyObservation
+from lsst.meas.extensions.scarlet.io import dataToScarlet, DummyObservation, ScarletBlendData
 from lsst.afw.table import SourceCatalog
 from lsst.afw.detection import Footprint
 from lsst.afw.geom import SpanSet, Stencil
@@ -40,6 +40,52 @@


 class TestDeblend(lsst.utils.tests.TestCase):
+    def _insert_blank_source(self, modelData, catalog):
+        # Add parent
+        parent = catalog.addNew()
+        parent.setParent(0)
+        parent["deblend_nChild"] = 1
+        parent["deblend_nPeaks"] = 1
+        ss = SpanSet.fromShape(5, Stencil.CIRCLE, offset=(30, 70))
+        footprint = Footprint(ss)
+        peak = footprint.addPeak(30, 70, 0)
+        parent.setFootprint(footprint)
+
+        # Add the zero flux source
+        dtype = np.float32
+        center = (70, 30)
+        origin = (center[0]-5, center[1]-5)
+        src = catalog.addNew()
+        src.setParent(parent.getId())
+        src["deblend_peak_center_x"] = center[1]
+        src["deblend_peak_center_y"] = center[0]
+        src["deblend_nPeaks"] = 1
+
+        sources = {
+            src.getId(): {
+                "components": [],
+                "factorized": [{
+                    "xy0": origin[::-1],
+                    "center": (center[1] - origin[1], center[0] - origin[0]),
+                    "sed": np.zeros((len(self.bands),), dtype=dtype),
+                    "morph": np.zeros((11, 11), dtype=dtype),
+                    "extent": (11, 11),
+                }],
+                "peakId": peak.getId(),
+            }
+        }
+
+        blendData = ScarletBlendData.fromDict({
+            "xy0": origin[::-1],
+            "extent": (11, 11),
+            "psfCenter": center[::-1],
+            "sources": sources,
+            "bands": self.bands,
+        })
+        pid = parent.getId()
+        modelData.blends[pid] = blendData
+        return pid, src.getId()
+
     def test_deblend_task(self):
         # Set the random seed so that the noise field is unaffected
         np.random.seed(0)
@@ -66,6 +112,7 @@ def test_deblend_task(self):
         images += noise

         filters = "grizy"
+        self.bands = filters
         _images = afwImage.MultibandMaskedImage.fromArrays(filters, images.astype(np.float32), None, noise**2)
         coadds = [afwImage.Exposure(img, dtype=img.image.array.dtype) for img in _images]
         coadds = afwImage.MultibandExposure.fromExposures(filters, coadds)
@@ -106,6 +153,8 @@ def test_deblend_task(self):
         # Run the deblender
         catalog, modelData = deblendTask.run(coadds, catalog)

+        bad_blend_id, bad_src_id = self._insert_blank_source(modelData, catalog)
+
         # Attach the footprints in each band and compare to the full
         # data model. This is done in each band, both with and without
         # flux re-distribution to test all of the different possible
@@ -144,6 +193,8 @@ def test_deblend_task(self):
             self.assertEqual(len(children), parent.get("deblend_nChild"))
             # Check that parent columns are propagated
             # to their children
+            if parent.getId() == bad_blend_id:
+                continue
             for parentCol, childCol in config.columnInheritance.items():
                 np.testing.assert_array_equal(parent.get(parentCol), children[childCol])

@@ -197,6 +248,8 @@ def test_deblend_task(self):
                 # The HeavyFootprint needs to be projected onto
                 # the image of the flux-redistributed model,
                 # since the HeavyFootprint may trim rows or columns.
+                if child["deblend_zeroFlux"]:
+                    continue
                 parentFootprint = catalog[catalog["id"] == child["parent"]][0].getFootprint()
                 blend.observation.images = redistributeImage[parentFootprint.getBBox()].array
                 blend.observation.images = blend.observation.images[None, :, :]
@@ -234,6 +287,10 @@ def test_deblend_task(self):
         skipped = largeFootprint | denseFootprint
         np.testing.assert_array_equal(skipped, catalog["deblend_skipped"])

+        # Check that the zero flux source was flagged
+        for src in catalog:
+            np.testing.assert_equal(src["deblend_zeroFlux"], src.getId() == bad_src_id)
+

 class MemoryTester(lsst.utils.tests.MemoryTestCase):
     pass
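
The final check above can be distilled into a single vectorized assertion: after _insert_blank_source adds one blank source, exactly that record should carry deblend_zeroFlux. The snippet below is a hedged restatement using the test's own names (catalog, bad_src_id) and assumes the catalog is contiguous so column access returns numpy arrays.

import numpy as np

# Only the injected blank source should be flagged as zero flux.
flagged = np.asarray(catalog["deblend_zeroFlux"], dtype=bool)
expected = np.asarray(catalog["id"]) == bad_src_id
np.testing.assert_array_equal(flagged, expected)

Because the flag is set while footprints are attached in each band, a source can be flagged in one band and still carry flux and a full footprint in the others, which is consistent with the per-band comment in io.py and with the test skipping only the flux-redistribution comparison for flagged children rather than removing them from the catalog.
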