From 9cf92a698d85818edf9faf090be6859b40f0ab75 Mon Sep 17 00:00:00 2001
From: Jim Bosch
Date: Mon, 6 Nov 2023 13:19:56 -0500
Subject: [PATCH] Adapt to daf_butler API changes.

---
 python/lsst/pipe/tasks/hips.py          | 19 +++++++++++--------
 python/lsst/pipe/tasks/postprocess.py   | 10 +++++-----
 python/lsst/pipe/tasks/skyCorrection.py |  3 +--
 3 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/python/lsst/pipe/tasks/hips.py b/python/lsst/pipe/tasks/hips.py
index 79ab1ec8d..36b79dbe4 100644
--- a/python/lsst/pipe/tasks/hips.py
+++ b/python/lsst/pipe/tasks/hips.py
@@ -41,7 +41,7 @@
 
 from lsst.sphgeom import RangeSet, HealpixPixelization
 from lsst.utils.timer import timeMethod
-from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum, SkyPixDimension
+from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum
 import lsst.pex.config as pexConfig
 import lsst.pipe.base as pipeBase
 import lsst.afw.geom as afwGeom
@@ -548,16 +548,19 @@ def build_quantum_graph(
     output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
     incidental_output_dataset_types = dataset_types.outputs.copy()
     incidental_output_dataset_types.remove(output_dataset_type)
-    (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
-                               if isinstance(d, SkyPixDimension))
+    (hpx_output_dimension,) = (
+        registry.dimensions.skypix_dimensions[d] for d in output_dataset_type.dimensions.skypix.names
+    )
 
     constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
     common_skypix_name = registry.dimensions.commonSkyPix.name
     common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization
 
     # We will need all the pixels at the quantum resolution as well
-    task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
-    (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
+    task_dimensions = registry.dimensions.conform(task_def.connections.dimensions)
+    (hpx_dimension,) = (
+        registry.dimensions.skypix_dimensions[d] for d in task_dimensions.names if d != "band"
+    )
     hpx_pixelization = hpx_dimension.pixelization
 
     if hpx_pixelization.level < constraint_order:
@@ -608,7 +611,7 @@ def build_quantum_graph(
         bind=bind
     ).expanded()
     inputs_by_patch = defaultdict(set)
-    patch_dimensions = registry.dimensions.extract(["patch"])
+    patch_dimensions = registry.dimensions.conform(["patch"])
     for input_ref in input_refs:
         inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
     if not inputs_by_patch:
@@ -664,7 +667,7 @@ def build_quantum_graph(
         raise RuntimeError("Given constraints yielded empty quantum graph.")
 
     # Define initOutputs refs.
-    empty_data_id = DataCoordinate.makeEmpty(registry.dimensions)
+    empty_data_id = DataCoordinate.make_empty(registry.dimensions)
     init_outputs = {}
     global_init_outputs = []
     if config_dataset_type := dataset_types.initOutputs.get(task_def.configDatasetName):
@@ -862,7 +865,7 @@ class GenerateHipsTask(pipeBase.PipelineTask):
     def runQuantum(self, butlerQC, inputRefs, outputRefs):
         inputs = butlerQC.get(inputRefs)
 
-        dims = inputRefs.hips_exposure_handles[0].dataId.names
+        dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names
         order = None
         for dim in dims:
             if "healpix" in dim:
diff --git a/python/lsst/pipe/tasks/postprocess.py b/python/lsst/pipe/tasks/postprocess.py
index 36d3fd96c..de6eef491 100644
--- a/python/lsst/pipe/tasks/postprocess.py
+++ b/python/lsst/pipe/tasks/postprocess.py
@@ -1022,7 +1022,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
             raise ValueError("config.functorFile is None. "
                              "Must be a valid path to yaml in order to run Task as a PipelineTask.")
         result = self.run(handle=inputs['inputCatalog'], funcs=self.funcs,
-                          dataId=outputRefs.outputCatalog.dataId.full)
+                          dataId=dict(outputRefs.outputCatalog.dataId.mapping))
         outputs = pipeBase.Struct(outputCatalog=result)
         butlerQC.put(outputs, outputRefs)
 
@@ -1071,7 +1071,7 @@ def transform(self, band, handles, funcs, dataId):
         if dataId and self.config.columnsFromDataId:
             for key in self.config.columnsFromDataId:
                 if key in dataId:
-                    df[str(key)] = dataId[key]
+                    df[key] = dataId[key]
                 else:
                     raise ValueError(f"'{key}' in config.columnsFromDataId not found in dataId: {dataId}")
 
@@ -1400,7 +1400,7 @@ def __init__(self, **kwargs):
 
     def runQuantum(self, butlerQC, inputRefs, outputRefs):
         dataRefs = butlerQC.get(inputRefs.calexp)
-        visit = dataRefs[0].dataId.byName()['visit']
+        visit = dataRefs[0].dataId['visit']
 
         self.log.debug("Concatenating metadata from %d per-detector calexps (visit %d)",
                        len(dataRefs), visit)
@@ -1770,7 +1770,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
         # Add ccdVisitId to allow joining with CcdVisitTable
         idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
         inputs['ccdVisitId'] = idGenerator.catalog_id
-        inputs['band'] = butlerQC.quantum.dataId.full['band']
+        inputs['band'] = butlerQC.quantum.dataId['band']
         outputs = self.run(**inputs)
         butlerQC.put(outputs, outputRefs)
 
@@ -1870,7 +1870,7 @@ def runQuantum(self, butlerQC, inputRefs, outputRefs):
             raise ValueError("config.functorFile is None. "
                              "Must be a valid path to yaml in order to run Task as a PipelineTask.")
         outputs = self.run(inputs['inputCatalogs'], inputs['referenceCatalog'], funcs=self.funcs,
-                           dataId=outputRefs.outputCatalog.dataId.full)
+                           dataId=dict(outputRefs.outputCatalog.dataId.mapping))
         butlerQC.put(outputs, outputRefs)
 
 
diff --git a/python/lsst/pipe/tasks/skyCorrection.py b/python/lsst/pipe/tasks/skyCorrection.py
index 176ad3839..4cb85d435 100644
--- a/python/lsst/pipe/tasks/skyCorrection.py
+++ b/python/lsst/pipe/tasks/skyCorrection.py
@@ -27,7 +27,6 @@
 import lsst.afw.math as afwMath
 import lsst.pipe.base.connectionTypes as cT
 import numpy as np
-from lsst.daf.butler import DimensionGraph
 from lsst.pex.config import Config, ConfigField, ConfigurableField, Field, FieldValidationError
 from lsst.pipe.base import PipelineTask, PipelineTaskConfig, PipelineTaskConnections, Struct
 from lsst.pipe.tasks.background import (
@@ -59,7 +58,7 @@ def _skyFrameLookup(datasetType, registry, quantumDataId, collections):
     results : `list` [`lsst.daf.butler.DatasetRef`]
         List of datasets that will be used as sky calibration frames.
     """
-    newDataId = quantumDataId.subset(DimensionGraph(registry.dimensions, names=["instrument", "visit"]))
+    newDataId = quantumDataId.subset(registry.dimensions.conform(["instrument", "visit"]))
     skyFrames = []
     for dataId in registry.queryDataIds(["visit", "detector"], dataId=newDataId).expanded():
        skyFrame = registry.findDataset(