DM-46452: Skip footprints with too large PSF sizes #101

Open · wants to merge 2 commits into base: main
16 changes: 15 additions & 1 deletion python/lsst/meas/extensions/scarlet/scarletDeblendTask.py
@@ -34,7 +34,7 @@
from lsst.utils.logging import PeriodicLogger
from lsst.utils.timer import timeMethod

from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation
from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation, InvalidPsfSizeError

# Scarlet and proxmin have a different definition of log levels than the stack,
# so even "warnings" occur far more often than we would like.
@@ -141,6 +141,7 @@ def deblend(mExposure, modelPsf, footprint, config, spectrumInit, monotonicity,
badPixelMasks=config.badMask,
useWeights=config.useWeights,
convolutionType=config.convolutionType,
maxPsfSize=config.maxPsfSize,
)

# Convert the peaks into an array
@@ -391,6 +392,13 @@ class ScarletDeblendConfig(pexConfig.Config):
"a high density of sources from running out of memory. "
"If `maxSpectrumCutoff == -1` then there is no cutoff.")
)
maxPsfSize = pexConfig.Field(
dtype=int,
default=25,
doc="Maximum PSF size. "
"This is used to prevent unexpected behavior "
"when the PSF is too large due to an upstream bug."
)
# Failure modes
fallback = pexConfig.Field(
dtype=bool, default=True,
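
For reference, a minimal sketch of overriding the new limit on a `ScarletDeblendConfig` instance; the class and field names come from this diff, while the chosen value of 31 is purely illustrative.

```python
# Sketch only: tighten or relax the new PSF-size limit on the deblend config.
# ScarletDeblendConfig and maxPsfSize are taken from this diff; 31 is arbitrary.
from lsst.meas.extensions.scarlet.scarletDeblendTask import ScarletDeblendConfig

config = ScarletDeblendConfig()
assert config.maxPsfSize == 25   # default added in this PR
config.maxPsfSize = 31           # reject PSF kernels wider or taller than 31 pixels
```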
@@ -544,6 +552,8 @@ def _addSchemaKeys(self, schema):
doc='True when a blend has at least one band '
'that could not generate a PSF and was '
'not included in the model.')
self.psfTooLargeKey = schema.addField('deblend_psfTooLarge', type='Flag',
doc='PSF was too large to be used')
# Deblended source fields
self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
doc="Center used to apply constraints in scarlet",
@@ -767,6 +777,10 @@ def deblend(self, mExposure, catalog):
blendError = type(e).__name__
if isinstance(e, ScarletGradientError):
parent.set(self.iterKey, e.iterations)
elif isinstance(e, InvalidPsfSizeError):
parent.set(self.psfTooLargeKey, True)
centroid = parent.getFootprint().getCentroid()
self.log.warning(f"PSF too large for parent {parent.getId()} at {centroid}")
else:
blendError = "UnknownError"
if self.config.catchFailures:
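
The new `deblend_psfTooLarge` flag makes the skipped parents queryable downstream. A hedged sketch, assuming `catalog` is the contiguous output `SourceCatalog` of the task so that column access by name returns an array:

```python
# Sketch: count parents skipped because their PSF exceeded maxPsfSize.
# `catalog` is assumed to be the deblender's output SourceCatalog; the
# column name comes from the schema field added in this diff.
import numpy as np

tooLarge = np.asarray(catalog["deblend_psfTooLarge"], dtype=bool)
print(f"{tooLarge.sum()} parent(s) flagged deblend_psfTooLarge")
```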
37 changes: 32 additions & 5 deletions python/lsst/meas/extensions/scarlet/utils.py
@@ -1,3 +1,4 @@
import logging
import numpy as np

from lsst.afw.detection import Footprint as afwFootprint
@@ -16,9 +17,10 @@
import lsst.geom as geom
import lsst.scarlet.lite as scl


defaultBadPixelMasks = ["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"]

logger = logging.getLogger(__name__)


def footprintsToNumpy(
catalog: SourceCatalog,
@@ -103,14 +105,27 @@ def bboxToScarletBox(bbox: geom.Box2I, xy0: geom.Point2I = geom.Point2I()) -> sc
return scl.Box((bbox.getHeight(), bbox.getWidth()), origin)


def computePsfKernelImage(mExposure, psfCenter):
class InvalidPsfSizeError(Exception):
"""Exception raised when the PSF size is invalid."""
pass


def computePsfKernelImage(
mExposure: MultibandExposure,
psfCenter: tuple[int, int] | geom.Point2I | geom.Point2D,
maxPsfSize: int = 101
):
"""Compute the PSF kernel image and update the multiband exposure
if not all of the PSF images could be computed.

Parameters
----------
psfCenter : `tuple` or `Point2I` or `Point2D`
mExposure :
The multi-band exposure that the model represents.
psfCenter :
The location `(x, y)` used as the center of the PSF.
maxPsfSize :
The maximum size of the PSF in any dimension.

Returns
-------
@@ -122,9 +137,13 @@ def computePsfKernelImage(mExposure, psfCenter):
"""
if not isinstance(psfCenter, geom.Point2D):
psfCenter = geom.Point2D(*psfCenter)

try:
psfModels = mExposure.computePsfKernelImage(psfCenter)
shape = psfModels.array.shape
if np.any(np.array(shape) > maxPsfSize):
msg = (f"PSF size {shape} is too large at {psfCenter}, "
f"maximum size of any dimension is {maxPsfSize}.")
raise InvalidPsfSizeError(msg)
except IncompleteDataError as e:
psfModels = e.partialPsf
# Use only the bands that successfully generated a PSF image.
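
The guard itself is just a bound on the kernel shape. A standalone sketch of the same check in plain numpy (the helper name is hypothetical):

```python
# Sketch of the size guard added above: reject a PSF kernel whose height or
# width exceeds maxPsfSize pixels. The helper name is hypothetical.
import numpy as np


def psfTooLarge(psfArray: np.ndarray, maxPsfSize: int) -> bool:
    return bool(np.any(np.array(psfArray.shape) > maxPsfSize))


assert psfTooLarge(np.zeros((41, 41)), 25)       # 41 > 25: rejected
assert not psfTooLarge(np.zeros((41, 41)), 101)  # within the permissive default
```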
@@ -146,6 +165,7 @@ def buildObservation(
footprint: afwFootprint = None,
useWeights: bool = True,
convolutionType: str = "real",
maxPsfSize: int = 101,
) -> scl.Observation:
"""Generate an Observation from a set of arguments.

@@ -178,16 +198,23 @@
The type of convolution to use (either "real" or "fft").
When reconstructing an image it is advised to use "real" to avoid
polluting the footprint with artifacts from the fft.
maxPsfSize:
The maximum size of the PSF in any dimension.

Returns
-------
observation:
The observation constructed from the input parameters.

Raises
------
InvalidPsfSizeError:
If the PSF size is larger than `maxPsfSize` in any dimension.
"""
# Initialize the observed PSFs
if not isinstance(psfCenter, geom.Point2D):
psfCenter = geom.Point2D(*psfCenter)
psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter)
psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter, maxPsfSize)

# Use the inverse variance as the weights
if useWeights:
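
Callers of `buildObservation` now need to handle the new exception. A hedged sketch of the calling pattern, mirroring the task-level handling in `scarletDeblendTask.py`; only parameters visible in this diff are shown, and `mExposure`/`footprint` are assumed to be defined by the caller.

```python
# Sketch only: catch InvalidPsfSizeError when building an observation.
# Arguments not shown in this diff are omitted; mExposure and footprint
# are assumed to exist in the calling scope.
from lsst.meas.extensions.scarlet.utils import buildObservation, InvalidPsfSizeError

try:
    observation = buildObservation(
        mExposure=mExposure,
        psfCenter=footprint.getCentroid(),
        footprint=footprint,
        maxPsfSize=25,
        # ... other required arguments omitted ...
    )
except InvalidPsfSizeError:
    # Skip this footprint; its PSF model exceeds the configured limit.
    observation = None
```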