diff --git a/python/lsst/meas/extensions/scarlet/scarletDeblendTask.py b/python/lsst/meas/extensions/scarlet/scarletDeblendTask.py index dd10cec..db67be1 100644 --- a/python/lsst/meas/extensions/scarlet/scarletDeblendTask.py +++ b/python/lsst/meas/extensions/scarlet/scarletDeblendTask.py @@ -34,7 +34,7 @@ from lsst.utils.logging import PeriodicLogger from lsst.utils.timer import timeMethod -from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation +from .utils import bboxToScarletBox, defaultBadPixelMasks, buildObservation, InvalidPsfSizeError # Scarlet and proxmin have a different definition of log levels than the stack, # so even "warnings" occur far more often than we would like. @@ -141,6 +141,7 @@ def deblend(mExposure, modelPsf, footprint, config, spectrumInit, monotonicity, badPixelMasks=config.badMask, useWeights=config.useWeights, convolutionType=config.convolutionType, + maxPsfSize=config.maxPsfSize, ) # Convert the peaks into an array @@ -391,6 +392,13 @@ class ScarletDeblendConfig(pexConfig.Config): "a high density of sources from running out of memory. " "If `maxSpectrumCutoff == -1` then there is no cutoff.") ) + maxPsfSize = pexConfig.Field( + dtype=int, + default=25, + doc="Maximum PSF size. " + "This is used to prevent unexpected behavior " + "when the PSF is too large due to an upstream bug." 
+ ) # Failure modes fallback = pexConfig.Field( dtype=bool, default=True, @@ -544,6 +552,8 @@ def _addSchemaKeys(self, schema): doc='True when a blend has at least one band ' 'that could not generate a PSF and was ' 'not included in the model.') + self.psfTooLargeKey = schema.addField('deblend_psfTooLarge', type='Flag', + doc='PSF was too large to be used') # Deblended source fields self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center", doc="Center used to apply constraints in scarlet", @@ -767,6 +777,10 @@ def deblend(self, mExposure, catalog): blendError = type(e).__name__ if isinstance(e, ScarletGradientError): parent.set(self.iterKey, e.iterations) + elif isinstance(e, InvalidPsfSizeError): + parent.set(self.psfTooLargeKey, True) + centroid = parent.getFootprint().getCentroid() + self.log.warn(f"PSF too large for parent {parent.getId()} at {centroid}") else: blendError = "UnknownError" if self.config.catchFailures: diff --git a/python/lsst/meas/extensions/scarlet/utils.py b/python/lsst/meas/extensions/scarlet/utils.py index 1be21e5..d7eb550 100644 --- a/python/lsst/meas/extensions/scarlet/utils.py +++ b/python/lsst/meas/extensions/scarlet/utils.py @@ -1,3 +1,4 @@ +import logging import numpy as np from lsst.afw.detection import Footprint as afwFootprint @@ -16,9 +17,10 @@ import lsst.geom as geom import lsst.scarlet.lite as scl - defaultBadPixelMasks = ["BAD", "CR", "NO_DATA", "SAT", "SUSPECT", "EDGE"] +logger = logging.getLogger(__name__) + def footprintsToNumpy( catalog: SourceCatalog, @@ -103,14 +105,27 @@ def bboxToScarletBox(bbox: geom.Box2I, xy0: geom.Point2I = geom.Point2I()) -> sc return scl.Box((bbox.getHeight(), bbox.getWidth()), origin) -def computePsfKernelImage(mExposure, psfCenter): +class InvalidPsfSizeError(Exception): + """Exception raised when the PSF size is invalid.""" + pass + + +def computePsfKernelImage( + mExposure: MultibandExposure, + psfCenter: tuple[int, int] | geom.Point2I | geom.Point2D, + maxPsfSize: 
int = 101 +): """Compute the PSF kernel image and update the multiband exposure if not all of the PSF images could be computed. Parameters ---------- - psfCenter : `tuple` or `Point2I` or `Point2D` + mExposure : + The multi-band exposure that the model represents. + psfCenter : The location `(x, y)` used as the center of the PSF. + maxPsfSize : + The maximum size of the PSF in any dimension. Returns ------- @@ -122,9 +137,13 @@ def computePsfKernelImage(mExposure, psfCenter): """ if not isinstance(psfCenter, geom.Point2D): psfCenter = geom.Point2D(*psfCenter) - try: psfModels = mExposure.computePsfKernelImage(psfCenter) + shape = psfModels.array.shape + if np.any(np.array(shape) > maxPsfSize): + msg = (f"PSF size {shape} is too large at {psfCenter}, " + f"maximum size of any dimension is {maxPsfSize}.") + raise InvalidPsfSizeError(msg) except IncompleteDataError as e: psfModels = e.partialPsf # Use only the bands that successfully generated a PSF image. @@ -146,6 +165,7 @@ def buildObservation( footprint: afwFootprint = None, useWeights: bool = True, convolutionType: str = "real", + maxPsfSize: int = 101, ) -> scl.Observation: """Generate an Observation from a set of arguments. @@ -178,16 +198,23 @@ The type of convolution to use (either "real" or "fft"). When reconstructing an image it is advised to use "real" to avoid polluting the footprint with artifacts from the fft. + maxPsfSize : + The maximum size of the PSF in any dimension. Returns ------- observation: The observation constructed from the input parameters. + + Raises + ------ + InvalidPsfSizeError: + If the PSF size is larger than `maxPsfSize` in any dimension. """ # Initialize the observed PSFs if not isinstance(psfCenter, geom.Point2D): psfCenter = geom.Point2D(*psfCenter) - psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter) + psfModels, mExposure = computePsfKernelImage(mExposure, psfCenter, maxPsfSize) # Use the inverse variance as the weights if useWeights: