From 7c8bbd52c55dfa6845f1bf4b9bdd74a48211d54c Mon Sep 17 00:00:00 2001 From: Kort Travis Date: Mon, 23 Sep 2024 18:15:19 -0400 Subject: [PATCH] Reduction process: effective-instrument geometry. At the end of the reduction process, the instrument associated with each output workspace is modified. A new _effective_ instrument is substituted for each workspace. This instrument has the same number of pixels as there are group-ids, and the location of each pixel is set to the mean location of the _unmasked_ original pixels participating in that pixel group. By implication, this substitution results in there being one pixel per spectrum in the output workspaces. This commit includes the following changes: * A new `EffectiveInstrumentRecipe` implemented as a subrecipe called by `ReductionRecipe` for each grouping; * Modifications to `LocalDataService.writeReductionData` to use updated Mantid algorithms, now allowing limited I/O of programmatically-generated instruments; * Modification of `ReductionIngredients` to include the _unmasked_ `PixelGroup`s; * Modification of `SousChef.prepReductionIngredients` to prepare the _unmasked_ `PixelGroup`s; * Modification of existing unit tests, and implementation of new unit tests to verify the new subrecipe's execution. Associated with this PR are three Mantid PRs, including changes to the `EditInstrumentGeometry`, `SaveNexusESS`, and `LoadNexusProcessed` algorithms. --- environment.yml | 4 +- .../EffectiveInstrumentIngredients.py | 12 ++ .../dao/ingredients/ReductionIngredients.py | 9 +- src/snapred/backend/dao/state/PixelGroup.py | 2 +- .../backend/data/DataFactoryService.py | 19 +- src/snapred/backend/data/LocalDataService.py | 58 ++++- src/snapred/backend/error/ContinueWarning.py | 3 +- .../backend/error/RecoverableException.py | 3 +- .../recipe/EffectiveInstrumentRecipe.py | 82 +++++++ ...ixelGroupingParametersCalculationRecipe.py | 2 +- src/snapred/backend/recipe/Recipe.py | 2 +- .../recipe/ReductionGroupProcessingRecipe.py | 25 --- src/snapred/backend/recipe/ReductionRecipe.py | 12 +- .../backend/service/ReductionService.py | 37 ++-- src/snapred/backend/service/SousChef.py | 23 +- src/snapred/resources/application.yml | 6 + src/snapred/ui/workflow/DiffCalWorkflow.py | 2 +- .../cis_tests/effective_instrument_script.py | 116 ++++++++++ tests/resources/application.yml | 7 +- .../calibration/ReductionIngredients.json | 202 ++++++++++++++++++ .../backend/data/test_DataFactoryService.py | 18 +- .../backend/data/test_LocalDataService.py | 163 ++++++++++++-- .../recipe/test_EffectiveInstrumentRecipe.py | 183 ++++++++++++++++ .../recipe/test_PreprocessReductionRecipe.py | 18 +- .../backend/recipe/test_ReductionRecipe.py | 35 ++- .../backend/service/test_ReductionService.py | 85 +++++++- tests/unit/backend/service/test_SousChef.py | 68 +++--- .../unit/ui/workflow/test_DiffCalWorkflow.py | 35 ++- tests/util/SculleryBoy.py | 5 +- 29 files changed, 1067 insertions(+), 169 deletions(-) create mode 100644 src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py create mode 100644 src/snapred/backend/recipe/EffectiveInstrumentRecipe.py create mode 100644 tests/cis_tests/effective_instrument_script.py create mode 100644 tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py diff --git a/environment.yml b/environment.yml index 9778e71fc..6610d510d 100644 --- a/environment.yml +++ b/environment.yml @@ -2,12 +2,12 @@ name: SNAPRed channels: - conda-forge - default -- mantid-ornl/label/rc +- mantid/label/nightly dependencies: - python=3.10 - pip - 
pydantic>=2.7.3,<3 -- mantidworkbench=6.10.0.2rc1 +- mantidworkbench>=6.11.20241111 - qtpy - pre-commit - pytest diff --git a/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py b/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py new file mode 100644 index 000000000..356c70cb4 --- /dev/null +++ b/src/snapred/backend/dao/ingredients/EffectiveInstrumentIngredients.py @@ -0,0 +1,12 @@ +from pydantic import BaseModel, ConfigDict + +from snapred.backend.dao.state.PixelGroup import PixelGroup + + +class EffectiveInstrumentIngredients(BaseModel): + + unmaskedPixelGroup: PixelGroup + + model_config = ConfigDict( + extra="forbid", + ) diff --git a/src/snapred/backend/dao/ingredients/ReductionIngredients.py b/src/snapred/backend/dao/ingredients/ReductionIngredients.py index c25955881..93ff8377d 100644 --- a/src/snapred/backend/dao/ingredients/ReductionIngredients.py +++ b/src/snapred/backend/dao/ingredients/ReductionIngredients.py @@ -11,6 +11,7 @@ from snapred.backend.dao.ingredients.GenerateFocussedVanadiumIngredients import GenerateFocussedVanadiumIngredients from snapred.backend.dao.ingredients.PreprocessReductionIngredients import PreprocessReductionIngredients from snapred.backend.dao.ingredients.ReductionGroupProcessingIngredients import ReductionGroupProcessingIngredients +from snapred.backend.dao.ingredients.EffectiveInstrumentIngredients import EffectiveInstrumentIngredients from snapred.backend.dao.state.PixelGroup import PixelGroup @@ -22,6 +23,7 @@ class ReductionIngredients(BaseModel): timestamp: float pixelGroups: List[PixelGroup] + unmaskedPixelGroups: List[PixelGroup] # these should come from calibration / normalization records # But will not exist if we proceed without calibration / normalization @@ -62,7 +64,12 @@ def applyNormalization(self, groupingIndex: int) -> ApplyNormalizationIngredient return ApplyNormalizationIngredients( pixelGroup=self.pixelGroups[groupingIndex], ) - + + def effectiveInstrument(self, groupingIndex: int) -> EffectiveInstrumentIngredients: + return EffectiveInstrumentIngredients( + unmaskedPixelGroup=self.unmaskedPixelGroups[groupingIndex] + ) + model_config = ConfigDict( extra="forbid", ) diff --git a/src/snapred/backend/dao/state/PixelGroup.py b/src/snapred/backend/dao/state/PixelGroup.py index e0949f835..33b2764bd 100644 --- a/src/snapred/backend/dao/state/PixelGroup.py +++ b/src/snapred/backend/dao/state/PixelGroup.py @@ -10,7 +10,7 @@ class PixelGroup(BaseModel): - # allow initializtion from either dictionary or list + # allow initialization from either dictionary or list pixelGroupingParameters: Union[List[PixelGroupingParameters], Dict[int, PixelGroupingParameters]] = {} nBinsAcrossPeakWidth: int = Config["calibration.diffraction.nBinsAcrossPeakWidth"] focusGroup: FocusGroup diff --git a/src/snapred/backend/data/DataFactoryService.py b/src/snapred/backend/data/DataFactoryService.py index 47f92aa3b..dcf28a8f5 100644 --- a/src/snapred/backend/data/DataFactoryService.py +++ b/src/snapred/backend/data/DataFactoryService.py @@ -191,24 +191,21 @@ def getReductionState(self, runId: str, useLiteMode: bool) -> ReductionState: return reductionState @validate_call - def getReductionDataPath(self, runId: str, useLiteMode: bool, version: int) -> Path: - return self.lookupService._constructReductionDataPath(runId, useLiteMode, version) + def getReductionDataPath(self, runId: str, useLiteMode: bool, timestamp: float) -> Path: + return self.lookupService._constructReductionDataPath(runId, useLiteMode, timestamp) @validate_call 
- def getReductionRecord(self, runId: str, useLiteMode: bool, version: Optional[int] = None) -> ReductionRecord: - """ - If no version is passed, will use the latest version applicable to runId - """ - return self.lookupService.readReductionRecord(runId, useLiteMode, version) + def getReductionRecord(self, runId: str, useLiteMode: bool, timestamp: float) -> ReductionRecord: + return self.lookupService.readReductionRecord(runId, useLiteMode, timestamp) @validate_call - def getReductionData(self, runId: str, useLiteMode: bool, version: int) -> ReductionRecord: - return self.lookupService.readReductionData(runId, useLiteMode, version) + def getReductionData(self, runId: str, useLiteMode: bool, timestamp: float) -> ReductionRecord: + return self.lookupService.readReductionData(runId, useLiteMode, timestamp) @validate_call - def getCompatibleReductionMasks(self, runNumber: str, useLiteMode: bool) -> List[WorkspaceName]: + def getCompatibleReductionMasks(self, runId: str, useLiteMode: bool) -> List[WorkspaceName]: # Assemble a list of masks, both resident and otherwise, that are compatible with the current reduction - return self.lookupService.getCompatibleReductionMasks(runNumber, useLiteMode) + return self.lookupService.getCompatibleReductionMasks(runId, useLiteMode) ##### WORKSPACE METHODS ##### diff --git a/src/snapred/backend/data/LocalDataService.py b/src/snapred/backend/data/LocalDataService.py index 9daddf1ce..b8add744b 100644 --- a/src/snapred/backend/data/LocalDataService.py +++ b/src/snapred/backend/data/LocalDataService.py @@ -242,7 +242,7 @@ def getIPTS(self, runNumber: str, instrumentName: str = Config["instrument.name" def stateExists(self, runId: str) -> bool: stateId, _ = self.generateStateId(runId) statePath = self.constructCalibrationStateRoot(stateId) - # Shouldnt need to check lite as we init both at the same time + # Shouldn't need to check lite as we init both at the same time return statePath.exists() def workspaceIsInstance(self, wsName: str, wsType: Any) -> bool: @@ -378,7 +378,7 @@ def _constructReductionRecordFilePath(self, runNumber: str, useLiteMode: bool, t @validate_call def _constructReductionDataFilePath(self, runNumber: str, useLiteMode: bool, timestamp: float) -> Path: fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] filePath = self._constructReductionDataPath(runNumber, useLiteMode, timestamp) / fileName return filePath @@ -644,6 +644,27 @@ def writeReductionData(self, record: ReductionRecord): Persists the reduction data associated with a `ReductionRecord` -- `writeReductionRecord` must have been called prior to this method. """ + + # Implementation notes: + # + # 1) For SNAPRed's current reduction-workflow output implementation: + # + # *`SaveNexusESS` _must_ be used, `SaveNexus` by itself won't work; + # + # * ONLY a simplified instrument geometry can be saved, + # for example, as produced by `EditInstrumentGeometry`: + # this geometry includes no monitors, only a single non-nested detector bank, and no parameter map. + # + # * `LoadNexus` should work with all of this _automatically_. + # + # Hopefully this will eventually be fixed, but right now this is a limitation of Mantid's + # instrument-I/O implementation (for non XML-based instruments). 
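+        #    As a hedged illustration of the round trip that note (1) relies on (placeholder
+        #    names `ws`, `ws2`, and `path`; a Mantid build containing the associated
+        #    `SaveNexusESS` / `LoadNexusProcessed` changes is assumed):
+        #
+        #        SaveNexusESS(InputWorkspace=ws, Filename=path)   # `ws` carries the reduced instrument
+        #        LoadNexus(Filename=path, OutputWorkspace=ws2)    # the reduced instrument is reconstructed automatically
+        #        assert mtd[ws2].getInstrument().getNumberDetectors(True) == mtd[ws2].getNumberHistograms()
+        #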
+ # + # 2) For SNAPRed internal use: + # if `reduction.output.useEffectiveInstrument` is set to false in "application.yml", + # output workspaces will be saved without converting their instruments to the reduced form. + # This case is retained to allow some flexibility in what specifically is saved with the reduction data. + # runNumber, useLiteMode, timestamp = record.runNumber, record.useLiteMode, record.timestamp @@ -655,15 +676,42 @@ def writeReductionData(self, record: ReductionRecord): # WARNING: `writeReductionRecord` must be called before `writeReductionData`. raise RuntimeError(f"reduction version directories {filePath.parent} do not exist") + useEffectiveInstrument = Config["reduction.output.useEffectiveInstrument"] + for ws in record.workspaceNames: # Append workspaces to hdf5 file, in order of the `workspaces` list - self.writeWorkspace(filePath.parent, Path(filePath.name), ws, append=True) - + if ws.tokens("workspaceType") == wngt.REDUCTION_PIXEL_MASK: + + # The mask workspace always uses the non-reduced instrument. + self.mantidSnapper.SaveNexus( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + self.mantidSnapper.executeQueue() + # Write an additional copy of the combined pixel mask as a separate `SaveDiffCal`-format file maskFilename = ws + ".h5" self.writePixelMask(filePath.parent, Path(maskFilename), ws) - + else: + if useEffectiveInstrument: + self.mantidSnapper.SaveNexusESS( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + else: + self.mantidSnapper.SaveNexus( + f"Append workspace '{ws}' to reduction output", + InputWorkspace=ws, + Filename=str(filePath), + Append=True, + ) + self.mantidSnapper.executeQueue() + # Append the "metadata" group, containing the `ReductionRecord` metadata with h5py.File(filePath, "a") as h5: n5m.insertMetadataGroup(h5, record.dict(), "/metadata") diff --git a/src/snapred/backend/error/ContinueWarning.py b/src/snapred/backend/error/ContinueWarning.py index 69869ae95..7554d1d48 100644 --- a/src/snapred/backend/error/ContinueWarning.py +++ b/src/snapred/backend/error/ContinueWarning.py @@ -30,8 +30,7 @@ def flags(self): return self.model.flags def __init__(self, message: str, flags: "Type" = 0): - ContinueWarning.Model.update_forward_refs() - ContinueWarning.Model.model_rebuild(force=True) + ContinueWarning.Model.model_rebuild(force=True) # replaces: `update_forward_refs` method self.model = ContinueWarning.Model(message=message, flags=flags) super().__init__(message) diff --git a/src/snapred/backend/error/RecoverableException.py b/src/snapred/backend/error/RecoverableException.py index ad96d254a..57007e7f9 100644 --- a/src/snapred/backend/error/RecoverableException.py +++ b/src/snapred/backend/error/RecoverableException.py @@ -37,8 +37,7 @@ def data(self): return self.model.data def __init__(self, message: str, flags: "Type" = 0, data: Optional[Any] = None): - RecoverableException.Model.update_forward_refs() - RecoverableException.Model.model_rebuild(force=True) + RecoverableException.Model.model_rebuild(force=True) # replaces: `update_forward_refs` method self.model = RecoverableException.Model(message=message, flags=flags, data=data) logger.error(f"{extractTrueStacktrace()}") super().__init__(message) diff --git a/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py b/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py new file mode 100644 index 000000000..5e2ed7418 --- /dev/null +++ 
b/src/snapred/backend/recipe/EffectiveInstrumentRecipe.py @@ -0,0 +1,82 @@ +from typing import Any, Dict, List, Tuple +import numpy as np + +from snapred.backend.dao.ingredients import EffectiveInstrumentIngredients as Ingredients +from snapred.backend.error.AlgorithmException import AlgorithmException +from snapred.backend.log.logger import snapredLogger +from snapred.backend.recipe.Recipe import Recipe +from snapred.meta.decorators.Singleton import Singleton +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName + +logger = snapredLogger.getLogger(__name__) + +Pallet = Tuple[Ingredients, Dict[str, str]] + +@Singleton +class EffectiveInstrumentRecipe(Recipe[Ingredients]): + + def unbagGroceries(self, groceries: Dict[str, Any]): + self.inputWS = groceries["inputWorkspace"] + self.outputWS = groceries.get("outputWorkspace", groceries["inputWorkspace"]) + + def chopIngredients(self, ingredients): + self.unmaskedPixelGroup = ingredients.unmaskedPixelGroup + + def queueAlgos(self): + """ + Queues up the processing algorithms for the recipe. + Requires: unbagged groceries. + """ + # `EditInstrumentGeometry` modifies in-place, so we need to clone if a distinct output workspace is required. + if self.outputWS != self.inputWS: + self.mantidSnapper.CloneWorkspace( + "Clone workspace for reduced instrument", + OutputWorkspace=self.outputWS, + InputWorkspace=self.inputWS + ) + self.mantidSnapper.EditInstrumentGeometry( + f"Editing instrument geometry for grouping '{self.unmaskedPixelGroup.focusGroup.name}'", + Workspace=self.outputWS, + # TODO: Mantid defect: allow SI units here! + L2=np.rad2deg(self.unmaskedPixelGroup.L2), + Polar=np.rad2deg(self.unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(self.unmaskedPixelGroup.azimuth), + # + InstrumentName=f"SNAP_{self.unmaskedPixelGroup.focusGroup.name}" + ) + + def validateInputs(self, ingredients: Ingredients, groceries: Dict[str, WorkspaceName]): + pass + + def execute(self): + """ + Final step in a recipe, executes the queued algorithms. + Requires: queued algorithms. + """ + try: + self.mantidSnapper.executeQueue() + except AlgorithmException as e: + errorString = str(e) + raise RuntimeError(errorString) from e + + def cook(self, ingredients, groceries: Dict[str, str]) -> Dict[str, Any]: + """ + Main interface method for the recipe. + Given the ingredients and groceries, it prepares, executes and returns the final workspace. + """ + self.prep(ingredients, groceries) + self.execute() + return self.outputWS + + def cater(self, shipment: List[Pallet]) -> List[Dict[str, Any]]: + """ + A secondary interface method for the recipe. + It is a batched version of cook. + Given a shipment of ingredients and groceries, it prepares, executes and returns the final workspaces. 
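+
+        Example (a hedged sketch; the grouping indices and workspace names are illustrative,
+        and `reductionIngredients` is assumed to be an already-prepared `ReductionIngredients`):
+
+            recipe = EffectiveInstrumentRecipe()
+            outputs = recipe.cater([
+                (reductionIngredients.effectiveInstrument(0), {"inputWorkspace": "reduced_ws_group_0"}),
+                (reductionIngredients.effectiveInstrument(1), {"inputWorkspace": "reduced_ws_group_1"}),
+            ])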
+ """ + output = [] + for ingredients, grocery in shipment: + self.prep(ingredients, grocery) + output.append(self.outputWS) + self.execute() + return output diff --git a/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py b/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py index 6569051f5..44632afb1 100644 --- a/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py +++ b/src/snapred/backend/recipe/PixelGroupingParametersCalculationRecipe.py @@ -36,7 +36,7 @@ def executeRecipe( "Calling algorithm", Ingredients=ingredients.json(), GroupingWorkspace=groceries["groupingWorkspace"], - MaskWorkspace=groceries.get("MaskWorkspace", ""), + MaskWorkspace=groceries.get("maskWorkspace", ""), ) self.mantidSnapper.executeQueue() # NOTE contradictory issues with Callbacks between GUI and unit tests diff --git a/src/snapred/backend/recipe/Recipe.py b/src/snapred/backend/recipe/Recipe.py index a938207ca..8ac9ce389 100644 --- a/src/snapred/backend/recipe/Recipe.py +++ b/src/snapred/backend/recipe/Recipe.py @@ -47,7 +47,7 @@ def unbagGroceries(self, groceries: Dict[str, WorkspaceName]): @abstractmethod def queueAlgos(self): """ - Queues up the procesing algorithms for the recipe. + Queues up the processing algorithms for the recipe. Requires: unbagged groceries and chopped ingredients. """ diff --git a/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py b/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py index c39016948..b26a19f5d 100644 --- a/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py +++ b/src/snapred/backend/recipe/ReductionGroupProcessingRecipe.py @@ -28,31 +28,6 @@ def queueAlgos(self): Queues up the processing algorithms for the recipe. Requires: unbagged groceries. """ - # TODO: This is all subject to change based on EWM 4798 - # if self.rawInput is not None: - # logger.info("Processing Reduction Group...") - # estimateGeometryAlgo = EstimateFocusedInstrumentGeometry() - # estimateGeometryAlgo.initialize() - # estimateGeometryAlgo.setProperty("GroupingWorkspace", self.groupingWS) - # estimateGeometryAlgo.setProperty("OutputWorkspace", self.geometryOutputWS) - # try: - # estimateGeometryAlgo.execute() - # data["focusParams"] = estimateGeometryAlgo.getPropertyValue("FocusParams") - # except RuntimeError as e: - # errorString = str(e) - # raise RuntimeError(errorString) from e - # else: - # raise NotImplementedError - - # self.mantidSnapper.EditInstrumentGeometry( - # "Editing Instrument Geometry...", - # Workspace=self.geometryOutputWS, - # L2=data["focusParams"].L2, - # Polar=data["focusParams"].Polar, - # Azimuthal=data["focusParams"].Azimuthal, - # ) - # self.rawInput = self.geometryOutputWS - self.mantidSnapper.ConvertUnits( "Converting to TOF...", InputWorkspace=self.rawInput, diff --git a/src/snapred/backend/recipe/ReductionRecipe.py b/src/snapred/backend/recipe/ReductionRecipe.py index cc6253e22..657078bb3 100644 --- a/src/snapred/backend/recipe/ReductionRecipe.py +++ b/src/snapred/backend/recipe/ReductionRecipe.py @@ -6,10 +6,12 @@ from snapred.backend.recipe.GenerateFocussedVanadiumRecipe import GenerateFocussedVanadiumRecipe from snapred.backend.recipe.GenericRecipe import ArtificialNormalizationRecipe from snapred.backend.recipe.PreprocessReductionRecipe import PreprocessReductionRecipe +from snapred.backend.recipe.EffectiveInstrumentRecipe import EffectiveInstrumentRecipe from snapred.backend.recipe.Recipe import Recipe, WorkspaceName from snapred.backend.recipe.ReductionGroupProcessingRecipe 
import ReductionGroupProcessingRecipe from snapred.meta.mantid.WorkspaceNameGenerator import ValueFormatter as wnvf from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceNameGenerator as wng +from snapred.meta.Config import Config logger = snapredLogger.getLogger(__name__) @@ -61,7 +63,7 @@ def unbagGroceries(self, groceries: Dict[str, Any]): self.groceries = groceries.copy() self.sampleWs = groceries["inputWorkspace"] self.normalizationWs = groceries.get("normalizationWorkspace", "") - self.maskWs = groceries.get("maskWorkspace", "") + self.maskWs = groceries.get("combinedMask", "") self.groupingWorkspaces = groceries["groupingWorkspaces"] def _cloneWorkspace(self, inputWorkspace: str, outputWorkspace: str) -> str: @@ -274,6 +276,14 @@ def execute(self): ) self._cloneIntermediateWorkspace(sampleClone, f"sample_ApplyNormalization_{groupingIndex}") + # 5. Replace the instrument with the effective instrument for this grouping + if Config["reduction.output.useEffectiveInstrument"]: + self._applyRecipe( + EffectiveInstrumentRecipe, + self.ingredients.effectiveInstrument(groupingIndex), + inputWorkspace=sampleClone, + ) + # Cleanup outputs.append(sampleClone) diff --git a/src/snapred/backend/service/ReductionService.py b/src/snapred/backend/service/ReductionService.py index 67ce7784e..79a81fd70 100644 --- a/src/snapred/backend/service/ReductionService.py +++ b/src/snapred/backend/service/ReductionService.py @@ -1,7 +1,7 @@ +from typing import Any, Dict, List, Optional import json from collections.abc import Iterable from pathlib import Path -from typing import Any, Dict, List from snapred.backend.dao.ingredients import ( ArtificialNormalizationIngredients, @@ -183,10 +183,12 @@ def reduction(self, request: ReductionRequest): groupingResults = self.fetchReductionGroupings(request) request.focusGroups = groupingResults["focusGroups"] - ingredients = self.prepReductionIngredients(request) - ingredients.artificialNormalizationIngredients = request.artificialNormalizationIngredients + # Fetch groceries first: `prepReductionIngredients` will need the combined mask. groceries = self.fetchReductionGroceries(request) + + ingredients = self.prepReductionIngredients(request, groceries.get("combinedPixelMask")) + # attach the list of grouping workspaces to the grocery dictionary groceries["groupingWorkspaces"] = groupingResults["groupingWorkspaces"] @@ -306,18 +308,13 @@ def prepCombinedMask( return combinedMask @FromString - def prepReductionIngredients(self, request: ReductionRequest) -> ReductionIngredients: + def prepReductionIngredients(self, request: ReductionRequest, combinedPixelMask: Optional[WorkspaceName] = None) -> ReductionIngredients: """ Prepare the needed ingredients for calculating reduction. 
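+
+        Example (a hedged sketch mirroring how `reduction` calls this method; the mask key is
+        the one returned by `fetchReductionGroceries`):
+
+            groceries = self.fetchReductionGroceries(request)
+            ingredients = self.prepReductionIngredients(request, groceries.get("combinedPixelMask"))
+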
Requires: - - runNumber - - lite mode flag - - timestamp - - at least one focus group specified - - a smoothing parameter - - a calibrant sample path - - a peak threshold + - reduction request + - an optional combined mask workspace :param request: a reduction request :type request: ReductionRequest @@ -334,7 +331,9 @@ def prepReductionIngredients(self, request: ReductionRequest) -> ReductionIngred versions=request.versions, ) # TODO: Skip calibrant sample if there is no calibrant - return self.sousChef.prepReductionIngredients(farmFresh) + ingredients = self.sousChef.prepReductionIngredients(farmFresh, combinedPixelMask) + ingredients.artificialNormalizationIngredients = request.artificialNormalizationIngredients + return ingredients @FromString def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: @@ -353,7 +352,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: - "inputworkspace" - "diffcalWorkspace" - "normalizationWorkspace" - - "maskWorkspace" + - "combinedPixelMask" :rtype: Dict[str, Any] """ @@ -372,7 +371,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: # Fetch pixel masks residentMasks = {} - combinedMask = None + combinedPixelMask = None if request.pixelMasks: for mask in request.pixelMasks: match mask.tokens("workspaceType"): @@ -388,7 +387,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: raise RuntimeError( f"reduction pixel mask '{mask}' has unexpected workspace-type '{mask.tokens('workspaceType')}'" # noqa: E501 ) - if calVersion: + if calVersion is not None: # WARNING: version may be _zero_! self.groceryClerk.name("diffcalMaskWorkspace").diffcal_mask(request.runNumber, calVersion).useLiteMode( request.useLiteMode ).add() @@ -398,19 +397,19 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: **residentMasks, ) # combine all of the pixel masks, for application and final output - combinedMask = self.prepCombinedMask( + combinedPixelMask = self.prepCombinedMask( request.runNumber, request.useLiteMode, request.timestamp, maskGroceries.values() ) # gather the input workspace and the diffcal table self.groceryClerk.name("inputWorkspace").neutron(request.runNumber).useLiteMode(request.useLiteMode).add() - if calVersion: + if calVersion is not None: self.groceryClerk.name("diffcalWorkspace").diffcal_table(request.runNumber, calVersion).useLiteMode( request.useLiteMode ).add() - if normVersion: + if normVersion is not None: # WARNING: version may be _zero_! 
self.groceryClerk.name("normalizationWorkspace").normalization(request.runNumber, normVersion).useLiteMode( request.useLiteMode ).add() @@ -421,7 +420,7 @@ def fetchReductionGroceries(self, request: ReductionRequest) -> Dict[str, Any]: ) groceries = self.groceryService.fetchGroceryDict( groceryDict=self.groceryClerk.buildDict(), - **({"maskWorkspace": combinedMask} if combinedMask else {}), + **({"combinedPixelMask": combinedPixelMask} if combinedPixelMask else {}), ) self._markWorkspaceMetadata(request, groceries["inputWorkspace"]) diff --git a/src/snapred/backend/service/SousChef.py b/src/snapred/backend/service/SousChef.py index 768ec7258..61f152861 100644 --- a/src/snapred/backend/service/SousChef.py +++ b/src/snapred/backend/service/SousChef.py @@ -1,7 +1,7 @@ +from typing import Dict, List, Optional, Tuple import os from copy import deepcopy from pathlib import Path -from typing import Dict, List, Tuple import pydantic @@ -29,6 +29,7 @@ from snapred.backend.recipe.PixelGroupingParametersCalculationRecipe import PixelGroupingParametersCalculationRecipe from snapred.backend.service.CrystallographicInfoService import CrystallographicInfoService from snapred.backend.service.Service import Service +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName from snapred.meta.Config import Config from snapred.meta.decorators.Singleton import Singleton @@ -94,9 +95,9 @@ def prepFocusGroup(self, ingredients: FarmFreshIngredients) -> FocusGroup: groupingMap = self.dataFactoryService.getGroupingMap(ingredients.runNumber) return groupingMap.getMap(ingredients.useLiteMode)[ingredients.focusGroup.name] - def prepPixelGroup(self, ingredients: FarmFreshIngredients) -> PixelGroup: + def prepPixelGroup(self, ingredients: FarmFreshIngredients, pixelMask: Optional[WorkspaceName] = None) -> PixelGroup: groupingSchema = ingredients.focusGroup.name - key = (ingredients.runNumber, ingredients.useLiteMode, groupingSchema) + key = (ingredients.runNumber, ingredients.useLiteMode, groupingSchema, pixelMask) if key not in self._pixelGroupCache: focusGroup = self.prepFocusGroup(ingredients) instrumentState = self.prepInstrumentState(ingredients) @@ -107,7 +108,10 @@ def prepPixelGroup(self, ingredients: FarmFreshIngredients) -> PixelGroup: self.groceryClerk.name("groupingWorkspace").fromRun(ingredients.runNumber).grouping( focusGroup.name ).useLiteMode(ingredients.useLiteMode).add() - groceries = self.groceryService.fetchGroceryDict(self.groceryClerk.buildDict()) + groceries = self.groceryService.fetchGroceryDict( + self.groceryClerk.buildDict(), + maskWorkspace=pixelMask + ) data = PixelGroupingParametersCalculationRecipe().executeRecipe(pixelIngredients, groceries) self._pixelGroupCache[key] = PixelGroup( @@ -118,12 +122,12 @@ def prepPixelGroup(self, ingredients: FarmFreshIngredients) -> PixelGroup: ) return deepcopy(self._pixelGroupCache[key]) - def prepManyPixelGroups(self, ingredients: FarmFreshIngredients) -> List[PixelGroup]: + def prepManyPixelGroups(self, ingredients: FarmFreshIngredients, pixelMask: Optional[WorkspaceName] = None) -> List[PixelGroup]: pixelGroups = [] ingredients_ = ingredients.model_copy() for focusGroup in ingredients.focusGroups: ingredients_.focusGroup = focusGroup - pixelGroups.append(self.prepPixelGroup(ingredients_)) + pixelGroups.append(self.prepPixelGroup(ingredients_, pixelMask)) return pixelGroups def _getInstrumentDefinitionFilename(self, useLiteMode: bool) -> str: @@ -232,7 +236,7 @@ def _pullManyCalibrationDetectorPeaks( def _pullNormalizationRecordFFI( self, 
ingredients: FarmFreshIngredients, - ) -> Tuple[FarmFreshIngredients, float]: + ) -> Tuple[FarmFreshIngredients, float, Optional[str]]: normalizationRecord = self.dataFactoryService.getNormalizationRecord( ingredients.runNumber, ingredients.useLiteMode, ingredients.versions.normalization ) @@ -244,7 +248,7 @@ def _pullNormalizationRecordFFI( # TODO: Should smoothing parameter be an ingredient? return ingredients, smoothingParameter, calibrantSamplePath - def prepReductionIngredients(self, ingredients: FarmFreshIngredients) -> ReductionIngredients: + def prepReductionIngredients(self, ingredients: FarmFreshIngredients, combinedPixelMask: Optional[WorkspaceName] = None) -> ReductionIngredients: ingredients_ = ingredients.model_copy() # some of the reduction ingredients MUST match those used in the calibration/normalization processes ingredients_ = self._pullCalibrationRecordFFI(ingredients_) @@ -255,7 +259,8 @@ def prepReductionIngredients(self, ingredients: FarmFreshIngredients) -> Reducti runNumber=ingredients_.runNumber, useLiteMode=ingredients_.useLiteMode, timestamp=ingredients_.timestamp, - pixelGroups=self.prepManyPixelGroups(ingredients_), + pixelGroups=self.prepManyPixelGroups(ingredients_, combinedPixelMask), + unmaskedPixelGroups=self.prepManyPixelGroups(ingredients_), smoothingParameter=smoothingParameter, calibrantSamplePath=ingredients_.calibrantSamplePath, peakIntensityThreshold=self._getThresholdFromCalibrantSample(ingredients_.calibrantSamplePath), diff --git a/src/snapred/resources/application.yml b/src/snapred/resources/application.yml index 8d6c5ba56..8b8807fe3 100644 --- a/src/snapred/resources/application.yml +++ b/src/snapred/resources/application.yml @@ -96,6 +96,12 @@ calibration: fitting: minSignal2Noise: 0.0 +reduction: + output: + extension: .nxs + # convert the instrument for the output workspaces into the reduced form + useEffectiveInstrument: true + mantid: workspace: nameTemplate: diff --git a/src/snapred/ui/workflow/DiffCalWorkflow.py b/src/snapred/ui/workflow/DiffCalWorkflow.py index 00e55f8f4..e492f98fb 100644 --- a/src/snapred/ui/workflow/DiffCalWorkflow.py +++ b/src/snapred/ui/workflow/DiffCalWorkflow.py @@ -381,7 +381,7 @@ def purgeBadPeaks(self, maxChiSq): self._tweakPeakView, "Too Few Peaks", "Purging would result in fewer than the required 2 peaks for calibration. 
" - "The current set of peaks will be retained.", + + "The current set of peaks will be retained.", QMessageBox.Ok, ) else: diff --git a/tests/cis_tests/effective_instrument_script.py b/tests/cis_tests/effective_instrument_script.py new file mode 100644 index 000000000..8b287615d --- /dev/null +++ b/tests/cis_tests/effective_instrument_script.py @@ -0,0 +1,116 @@ +from datetime import datetime +from functools import partial +import math +import numpy as np +from pathlib import Path +import re +import sys + +from mantid.simpleapi import mtd + +import snapred +SNAPRed_module_root = Path(snapred.__file__).parent.parent + +from snapred.backend.dao.request.FarmFreshIngredients import FarmFreshIngredients +from snapred.backend.dao.request.ReductionRequest import ReductionRequest +from snapred.backend.data.DataFactoryService import DataFactoryService +from snapred.backend.service.SousChef import SousChef +from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceNameGenerator as wng +from snapred.meta.Config import Config + +# ----------------------------- +# Test helper utility routines: +sys.path.insert(0, str(Path(SNAPRed_module_root).parent / 'tests')) +from util.IPTS_override import IPTS_override +# from util.helpers import timestampFromString + +def timestampFromString(timestamp_str) -> float: + # Recover a float timestamp from a non-isoformat timestamp string + regx = re.compile(r"([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2})([0-9]{2})([0-9]{2})") + Y, M, D, H, m, s = tuple([int(s) for s in regx.match(timestamp_str).group(1, 2, 3, 4, 5, 6)]) + return datetime(Y, M, D, H, m, s).timestamp() + +######################################################## +# If necessary, override the IPTS search directories: ## +######################################################## +with IPTS_override(): # defaults to `Config["IPTS.root"]` + + ###################################################################################################################### + # Step 1: Generate a set of reduction data. Take a look under the output folder and see what its timestamp string is. + ###################################################################################################################### + + runNumber = "46680" + useLiteMode = True + timestamp_str = "2024-09-27T154022" # Unfortunately, not in iso format + timestamp = timestampFromString(timestamp_str) + + ################################################################################## + # Step 2: Reload the reduction record, and all of the reduction output workspaces. + ################################################################################## + dataService = DataFactoryService() + sousChef = SousChef() + + reductionRecord = dataService.getReductionData(runNumber, useLiteMode, timestamp) + + ######################################################################################################## + # Step 3: Load the required grouping workspaces, and compute their _unmasked_ pixel-grouping parameters. + # (Note here that the `ReductionRecord` retains only the _masked_ PGP.) + ######################################################################################################## + + # ... this duplicates the setup part of the reduction process ... 
+    groupingMap = dataService.getGroupingMap(runNumber).getMap(useLiteMode)
+    request = ReductionRequest(
+        runNumber=runNumber,
+        useLiteMode=useLiteMode,
+        timestamp=timestamp,
+        focusGroups = list(groupingMap.values()),
+        keepUnfocused=False,
+        convertUnitsTo="TOF"
+    )
+    farmFresh = FarmFreshIngredients(
+        runNumber=request.runNumber,
+        useLiteMode=request.useLiteMode,
+        timestamp=request.timestamp,
+        focusGroups=request.focusGroups,
+        keepUnfocused=request.keepUnfocused,
+        convertUnitsTo=request.convertUnitsTo,
+        versions=request.versions,
+    )
+    ingredients = sousChef.prepReductionIngredients(farmFresh)
+    # ... now the required PGP are available in `ingredients.unmaskedPixelGroups: List[PixelGroup]` ...
+    unmaskedPixelGroups = {pg.focusGroup.name: pg for pg in ingredients.unmaskedPixelGroups}
+
+    #################################################################################################################
+    # Step 4: For the output workspace corresponding to each grouping, verify that the effective instrument consists
+    # of one pixel per group-id, with its location matching the _unmasked_ PGP for that grouping and group-id.
+    #################################################################################################################
+
+    # For each grouping, verify that the output workspace's effective instrument has been set up as expected.
+    for grouping in unmaskedPixelGroups:
+        # We need to rebuild the workspace name, because the `WorkspaceName` of the loaded `ReductionRecord` will only retain its string component.
+        reducedOutputWs = wng.reductionOutput().runNumber(runNumber).group(grouping).timestamp(timestamp).build()
+        assert reducedOutputWs in reductionRecord.workspaceNames
+        assert mtd.doesExist(reducedOutputWs)
+
+        outputWs = mtd[reducedOutputWs]
+
+        effectiveInstrument = outputWs.getInstrument()
+
+        # verify the new instrument name
+        assert effectiveInstrument.getName() == f"SNAP_{grouping}"
+
+        # there should be one pixel per output spectrum
+        assert effectiveInstrument.getNumberDetectors(True) == outputWs.getNumberHistograms()
+
+        detectorInfo = outputWs.detectorInfo()
+        detectorIndex = detectorInfo  # alias for the checks below, which look up workspace indices by detector id
+        pixelGroup = unmaskedPixelGroups[grouping]
+
+        isclose = partial(math.isclose, rel_tol=np.finfo(float).eps, abs_tol=np.finfo(float).eps)
+        for n, gid in enumerate(pixelGroup.groupIDs()):
+            # Pixel ids should be the same as the group ids.
+ index = detectorIndex.indexOf(int(gid)) + + assert isclose(pixelGroup.L2[n], detectorInfo.l2(index)) + assert isclose(pixelGroup.twoTheta[n], detectorInfo.twoTheta(index)) + assert isclose(pixelGroup.azimuth[n], detectorInfo.azimuthal(index)) diff --git a/tests/resources/application.yml b/tests/resources/application.yml index 9517ed290..71730974e 100644 --- a/tests/resources/application.yml +++ b/tests/resources/application.yml @@ -103,7 +103,12 @@ calibration: fitting: minSignal2Noise: 10 - +reduction: + output: + extension: .nxs + # convert the instrument for the output workspaces into the reduced form + useEffectiveInstrument: true + mantid: workspace: nameTemplate: diff --git a/tests/resources/inputs/calibration/ReductionIngredients.json b/tests/resources/inputs/calibration/ReductionIngredients.json index ffabf2717..5439e6827 100644 --- a/tests/resources/inputs/calibration/ReductionIngredients.json +++ b/tests/resources/inputs/calibration/ReductionIngredients.json @@ -2,6 +2,7 @@ "runNumber": "57514", "useLiteMode": true, "timestamp": 1722893493.2375631, + "pixelGroups": [ { "pixelGroupingParameters": { @@ -200,6 +201,207 @@ "binningMode": -1 } ], + + "unmaskedPixelGroups": [ + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.4888167719149363, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.009342020503289764 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "All", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_All.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0009342020503289763, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.8230747956560218, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 2.3643065241537378 + }, + "dRelativeResolution": 0.006510460909679324 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.1545587481738246, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.46173472918300634, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.01149611207805968 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "Bank", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_Bank.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0006510460909679324, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 2.1108948177427838, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 1.8446004591300944 + }, + "dRelativeResolution": 0.005285822142225365 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.82310673131693, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.3897805853492433, + "maximum": 2.0555564929623205 + }, + "dRelativeResolution": 0.006281306679209721 + }, + "3": { + "groupID": 3, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.5352228379083572, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.43925160115545075, + "maximum": 2.3643065241537378 + }, + "dRelativeResolution": 0.007730690425302433 + }, + "4": { + "groupID": 4, + "isMasked": 
false, + "L2": 10.0, + "twoTheta": 1.440276389170165, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.46173472918300634, + "maximum": 2.5221224737819017 + }, + "dRelativeResolution": 0.008321708397955027 + }, + "5": { + "groupID": 5, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.1543988461042238, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.5352414154696261, + "maximum": 3.194134388808033 + }, + "dRelativeResolution": 0.01064966864886483 + }, + "6": { + "groupID": 6, + "isMasked": false, + "L2": 10.0, + "twoTheta": 0.8690010092470938, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.6591133268028296, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.014622431594735495 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "Column", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_Column.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0005285822142225365, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + }, + { + "pixelGroupingParameters": { + "1": { + "groupID": 1, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.7273751940345703, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.35983386414596874, + "maximum": 2.5221224737819017 + }, + "dRelativeResolution": 0.007007302163279077 + }, + "2": { + "groupID": 2, + "isMasked": false, + "L2": 10.0, + "twoTheta": 1.011699927675655, + "azimuth": 0.0, + "dResolution": { + "minimum": 0.5352414154696261, + "maximum": 4.355830260153734 + }, + "dRelativeResolution": 0.012791226447712764 + } + }, + "nBinsAcrossPeakWidth": 10, + "focusGroup": { + "name": "2_4", + "definition": "/SNS/users/wqp/SNAP/shared/Calibration/Powder/PixelGroupingDefinitions/SNAPFocGroup_2_4.lite.hdf" + }, + "timeOfFlight": { + "minimum": 2546.742533573784, + "binWidth": 0.0007007302163279077, + "maximum": 12000, + "binningMode": -1 + }, + "binningMode": -1 + } + ], + + "detectorPeaksMany": [ [ { diff --git a/tests/unit/backend/data/test_DataFactoryService.py b/tests/unit/backend/data/test_DataFactoryService.py index a25e67452..f8955e785 100644 --- a/tests/unit/backend/data/test_DataFactoryService.py +++ b/tests/unit/backend/data/test_DataFactoryService.py @@ -1,8 +1,7 @@ import hashlib -import unittest -import unittest.mock as mock from pathlib import Path from random import randint +import time from mantid.simpleapi import CreateSingleValuedWorkspace, DeleteWorkspace, mtd from snapred.backend.dao.calibration import Calibration @@ -14,6 +13,8 @@ from snapred.backend.data.DataFactoryService import DataFactoryService from snapred.backend.data.LocalDataService import LocalDataService +import unittest +import unittest.mock as mock class TestDataFactoryService(unittest.TestCase): def expected(cls, *args): @@ -79,6 +80,7 @@ def setUpClass(cls): def setUp(self): self.version = randint(2, 120) + self.timestamp = time.time() self.instance = DataFactoryService() self.instance.lookupService = self.mockLookupService assert isinstance(self.instance, DataFactoryService) @@ -273,18 +275,18 @@ def test_getCompatibleReductionMasks(self): def test_getReductionDataPath(self): for useLiteMode in [True, False]: - actual = self.instance.getReductionDataPath("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionDataPath("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) def test_getReductionRecord(self): for 
useLiteMode in [True, False]: - actual = self.instance.getReductionRecord("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionRecord("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) def test_getReductionData(self): for useLiteMode in [True, False]: - actual = self.instance.getReductionData("12345", useLiteMode, self.version) - assert actual == self.expected("12345", useLiteMode, self.version) + actual = self.instance.getReductionData("12345", useLiteMode, self.timestamp) + assert actual == self.expected("12345", useLiteMode, self.timestamp) ##### TEST WORKSPACE METHODS #### diff --git a/tests/unit/backend/data/test_LocalDataService.py b/tests/unit/backend/data/test_LocalDataService.py index d74adaff4..b153c9e95 100644 --- a/tests/unit/backend/data/test_LocalDataService.py +++ b/tests/unit/backend/data/test_LocalDataService.py @@ -2,6 +2,7 @@ import importlib import json import logging +import numpy as np import os import re import socket @@ -27,6 +28,7 @@ CreateSampleWorkspace, DeleteWorkspace, DeleteWorkspaces, + EditInstrumentGeometry, GroupWorkspaces, LoadEmptyInstrument, LoadInstrument, @@ -531,6 +533,14 @@ def test_stateExists(): assert localDataService.stateExists("12345") +def test_stateExists_not(): + # Test that the 'stateExists' method returns False when the state doesn't exist. + localDataService = LocalDataService() + localDataService.constructCalibrationStateRoot = mock.Mock(return_value=Path("a/non-existent/path")) + localDataService.generateStateId = mock.Mock(return_value=(ENDURING_STATE_ID, None)) + assert not localDataService.stateExists("12345") + + @mock.patch(ThisService + "GetIPTS") def test_calibrationFileExists(GetIPTS): # noqa ARG002 localDataService = LocalDataService() @@ -1566,8 +1576,8 @@ def _createWorkspaces(wss: List[WorkspaceName]): OutputWorkspace=src, Function="One Peak", NumBanks=1, - NumMonitors=1, - BankPixelWidth=5, + NumMonitors=0, + BankPixelWidth=4, NumEvents=500, Random=True, XUnit="DSP", @@ -1580,18 +1590,54 @@ def _createWorkspaces(wss: List[WorkspaceName]): Filename=fakeInstrumentFilePath, RewriteSpectraMap=True, ) + + # Mask workspace uses legacy instrument + mask = mtd.unique_hidden_name() + createCompatibleMask(mask, src) + + if Config["reduction.output.useEffectiveInstrument"]: + # Convert the source workspace's instrument to the reduced form: + # * no monitors; + # * only one bank of detectors; + # * no parameter map. 
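+            # (Per the implementation notes in `LocalDataService.writeReductionData`: with this flag
+            #  set, output workspaces are saved via `SaveNexusESS`, which only supports this kind of
+            #  simplified instrument geometry.)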
+ + detectorInfo = mtd[src].detectorInfo() + l2s, twoThetas, azimuths = [], [], [] + for n in range(detectorInfo.size()): + if detectorInfo.isMonitor(n): + continue + + l2 = detectorInfo.l2(n) + twoTheta = detectorInfo.twoTheta(n) + + # See: defect EWM#7384 + try: + azimuth = detectorInfo.azimuthal(n) + except RuntimeError as e: + if not str(e).startswith("Failed to create up axis"): + raise + azimuth = 0.0 + l2s.append(l2) + twoThetas.append(twoTheta) + azimuths.append(azimuth) + + EditInstrumentGeometry( + Workspace=src, + L2=np.rad2deg(l2s), + Polar=np.rad2deg(twoThetas), + Azimuthal=np.rad2deg(azimuths) + ) assert mtd.doesExist(src) + for ws in wss: - wsType = ws.tokens("workspaceType") - match wsType: - case wngt.REDUCTION_PIXEL_MASK: - createCompatibleMask(ws, src) - case _: - CloneWorkspace(OutputWorkspace=ws, InputWorkspace=src) + CloneWorkspace( + OutputWorkspace=ws, + InputWorkspace=src if ws.tokens("workspaceType") != wngt.REDUCTION_PIXEL_MASK else mask + ) assert mtd.doesExist(ws) cleanup_workspace_at_exit(ws) - DeleteWorkspace(Workspace=src) + DeleteWorkspaces([src, mask]) return wss yield _createWorkspaces @@ -1625,10 +1671,42 @@ def test_writeReductionData(readSyntheticReductionRecord, createReductionWorkspa # `writeReductionRecord` must be called first localDataService.writeReductionRecord(testRecord) localDataService.writeReductionData(testRecord) - + assert reductionFilePath.exists() +def test_writeReductionData_legacy_instrument(readSyntheticReductionRecord, createReductionWorkspaces): + # Test that the special `Config` setting allows the saving of workspaces with non-reduced instruments + + # In order to facilitate parallel testing: any workspace name used by this test should be unique. + inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) + _uniqueTimestamp = 1731518208.172797 + testRecord = readSyntheticReductionRecord(inputRecordFilePath, _uniqueTimestamp) + + # Temporarily use a single run number + runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp + stateId = ENDURING_STATE_ID + fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() + fileName += Config["nexus.file.extension"] + + with Config_override("reduction.output.useEffectiveInstrument", False): + wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 + localDataService = LocalDataService() + with reduction_root_redirect(localDataService, stateId=stateId): + localDataService.instrumentConfig = mock.Mock() + localDataService.getIPTS = mock.Mock(return_value="IPTS-12345") + + # Important to this test: use a path that doesn't already exist + reductionFilePath = localDataService._constructReductionRecordFilePath(runNumber, useLiteMode, timestamp) + assert not reductionFilePath.exists() + + # `writeReductionRecord` must be called first + localDataService.writeReductionRecord(testRecord) + localDataService.writeReductionData(testRecord) + + assert reductionFilePath.exists() + + def test_writeReductionData_no_directories(readSyntheticReductionRecord, createReductionWorkspaces): # noqa: ARG001 # In order to facilitate parallel testing: any workspace name used by this test should be unique. 
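+    # Note: `writeReductionData` requires `writeReductionRecord` to have been called first;
+    #   otherwise the reduction version directories do not exist yet and it raises a RuntimeError
+    #   (see the guard near the top of `LocalDataService.writeReductionData`).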
inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) @@ -1667,7 +1745,7 @@ def test_writeReductionData_metadata(readSyntheticReductionRecord, createReducti runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() @@ -1701,7 +1779,7 @@ def test_readWriteReductionData(readSyntheticReductionRecord, createReductionWor runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() @@ -1719,7 +1797,7 @@ def test_readWriteReductionData(readSyntheticReductionRecord, createReductionWor filePath = reductionRecordFilePath.parent / fileName assert filePath.exists() - + # move the existing test workspaces out of the way: # * this just adds the `_uniquePrefix` one more time. RenameWorkspaces(InputWorkspaces=wss, Prefix=_uniquePrefix) @@ -1739,10 +1817,65 @@ def test_readWriteReductionData(readSyntheticReductionRecord, createReductionWor equal, _ = CompareWorkspaces( Workspace1=ws, Workspace2=_uniquePrefix + ws, + CheckAllData=True ) assert equal +def test_readWriteReductionData_legacy_instrument(readSyntheticReductionRecord, createReductionWorkspaces, cleanup_workspace_at_exit): + # In order to facilitate parallel testing: any workspace name used by this test should be unique. + _uniquePrefix = "_test_RWRD_" + inputRecordFilePath = Path(Resource.getPath("inputs/reduction/ReductionRecord_20240614T130420.json")) + _uniqueTimestamp = 1731519071.6706867 + testRecord = readSyntheticReductionRecord(inputRecordFilePath, _uniqueTimestamp) + + runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp + stateId = ENDURING_STATE_ID + fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() + fileName += Config["reduction.output.extension"] + + with Config_override("reduction.output.useEffectiveInstrument", False): + wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 + localDataService = LocalDataService() + with reduction_root_redirect(localDataService, stateId=stateId): + localDataService.instrumentConfig = mock.Mock() + localDataService.getIPTS = mock.Mock(return_value="IPTS-12345") + + # Important to this test: use a path that doesn't already exist + reductionRecordFilePath = localDataService._constructReductionRecordFilePath(runNumber, useLiteMode, timestamp) + assert not reductionRecordFilePath.exists() + + # `writeReductionRecord` needs to be called first + localDataService.writeReductionRecord(testRecord) + localDataService.writeReductionData(testRecord) + + filePath = reductionRecordFilePath.parent / fileName + assert filePath.exists() + + # move the existing test workspaces out of the way: + # * this just adds the `_uniquePrefix` one more time. 
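+            # Renaming lets `readReductionData` reload the saved copies under their original names,
+            #   so each reloaded workspace can be compared against its renamed original below.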
+ RenameWorkspaces(InputWorkspaces=wss, Prefix=_uniquePrefix) + # append to the cleanup list + for ws in wss: + cleanup_workspace_at_exit(_uniquePrefix + ws) + + actualRecord = localDataService.readReductionData(runNumber, useLiteMode, timestamp) + assert actualRecord == testRecord + + # workspaces should have been reloaded with their original names + # Implementation note: + # * the workspaces must match _exactly_ here, so `CompareWorkspaces` must be used; + # please do _not_ replace this with one of the `assert_almost_equal` methods: + # -- they do not necessarily do what you think they should do... + for ws in actualRecord.workspaceNames: + equal, _ = CompareWorkspaces( + Workspace1=ws, + Workspace2=_uniquePrefix + ws, + CheckAllData=True + ) + assert equal + + def test_readWriteReductionData_pixel_mask( readSyntheticReductionRecord, createReductionWorkspaces, cleanup_workspace_at_exit ): @@ -1755,7 +1888,7 @@ def test_readWriteReductionData_pixel_mask( runNumber, useLiteMode, timestamp = testRecord.runNumber, testRecord.useLiteMode, testRecord.timestamp stateId = ENDURING_STATE_ID fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] wss = createReductionWorkspaces(testRecord.workspaceNames) # noqa: F841 localDataService = LocalDataService() with reduction_root_redirect(localDataService, stateId=stateId): @@ -1809,7 +1942,7 @@ def test__constructReductionDataFilePath(): stateId = ENDURING_STATE_ID testIPTS = "IPTS-12345" fileName = wng.reductionOutputGroup().runNumber(runNumber).timestamp(timestamp).build() - fileName += Config["nexus.file.extension"] + fileName += Config["reduction.output.extension"] expectedFilePath = ( Path(Config["instrument.reduction.home"].format(IPTS=testIPTS)) diff --git a/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py b/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py new file mode 100644 index 000000000..c4b1dba0f --- /dev/null +++ b/tests/unit/backend/recipe/test_EffectiveInstrumentRecipe.py @@ -0,0 +1,183 @@ +import numpy as np + +from snapred.backend.recipe.algorithm.Utensils import Utensils +from snapred.backend.recipe.EffectiveInstrumentRecipe import EffectiveInstrumentRecipe +from snapred.backend.dao.ingredients import EffectiveInstrumentIngredients as Ingredients +from snapred.backend.dao.state.FocusGroup import FocusGroup +from snapred.backend.dao.state.PixelGroup import PixelGroup +from snapred.meta.Config import Resource +from util.SculleryBoy import SculleryBoy + +from unittest import mock +import pytest + +class TestEffectiveInstrumentRecipe: + fakeInstrumentFilePath = Resource.getPath("inputs/testInstrument/fakeSNAP_Definition.xml") + sculleryBoy = SculleryBoy() + + @pytest.fixture(autouse=True) + def _setup(self): + self.ingredients = mock.Mock( + spec=Ingredients, + unmaskedPixelGroup=mock.Mock( + spec=PixelGroup, + L2=mock.Mock(), + twoTheta=mock.Mock(), + azimuth=mock.Mock(), + focusGroup=FocusGroup( + name="a_grouping", + definition="a/grouping/path" + ) + ) + ) + self.ingredients1 = mock.Mock( + spec=Ingredients, + unmaskedPixelGroup=mock.Mock( + spec=PixelGroup, + L2=mock.Mock(), + twoTheta=mock.Mock(), + azimuth=mock.Mock(), + focusGroup=FocusGroup( + name="another_grouping", + definition="another/grouping/path" + ) + ) + ) + self.ingredientss = [self.ingredients, self.ingredients1] + + + yield + + # teardown follows ... 
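+        # For comparison with the mocked ingredients above, a hedged sketch of building *real*
+        # ingredients, mirroring the "unmaskedPixelGroups" entries added to ReductionIngredients.json
+        # (field names follow that fixture; the values are illustrative only):
+        #
+        #     unmaskedPixelGroup = PixelGroup(
+        #         pixelGroupingParameters={
+        #             1: {"groupID": 1, "isMasked": False, "L2": 10.0, "twoTheta": 1.49, "azimuth": 0.0,
+        #                 "dResolution": {"minimum": 0.36, "maximum": 4.36}, "dRelativeResolution": 0.0093},
+        #         },
+        #         nBinsAcrossPeakWidth=10,
+        #         focusGroup=FocusGroup(name="All", definition="path/to/SNAPFocGroup_All.lite.hdf"),
+        #         timeOfFlight={"minimum": 2546.7, "binWidth": 0.00093, "maximum": 12000, "binningMode": -1},
+        #         binningMode=-1,
+        #     )
+        #     ingredients = Ingredients(unmaskedPixelGroup=unmaskedPixelGroup)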
+ pass + + def test_chopIngredients(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + recipe.chopIngredients(ingredients) + assert recipe.unmaskedPixelGroup == ingredients.unmaskedPixelGroup + + def test_unbagGroceries(self): + recipe = EffectiveInstrumentRecipe() + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + recipe.unbagGroceries(groceries) + assert recipe.inputWS == groceries["inputWorkspace"] + assert recipe.outputWS == groceries["outputWorkspace"] + + def test_unbagGroceries_output_default(self): + recipe = EffectiveInstrumentRecipe() + groceries = {"inputWorkspace": mock.Mock()} + recipe.unbagGroceries(groceries) + assert recipe.inputWS == groceries["inputWorkspace"] + assert recipe.outputWS == groceries["inputWorkspace"] + + def test_queueAlgos(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + recipe.prep(ingredients, groceries) + recipe.queueAlgos() + + queuedAlgos = recipe.mantidSnapper._algorithmQueue + + cloneWorkspaceTuple = queuedAlgos[0] + assert cloneWorkspaceTuple[0] == "CloneWorkspace" + assert cloneWorkspaceTuple[2]["InputWorkspace"] == groceries["inputWorkspace"] + assert cloneWorkspaceTuple[2]["OutputWorkspace"] == groceries["outputWorkspace"] + + editInstrumentGeometryTuple = queuedAlgos[1] + assert editInstrumentGeometryTuple[0] == "EditInstrumentGeometry" + assert editInstrumentGeometryTuple[2]["Workspace"] == groceries["outputWorkspace"] + + def test_queueAlgos_default(self): + recipe = EffectiveInstrumentRecipe() + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock()} + recipe.prep(ingredients, groceries) + recipe.queueAlgos() + + queuedAlgos = recipe.mantidSnapper._algorithmQueue + + editInstrumentGeometryTuple = queuedAlgos[0] + assert editInstrumentGeometryTuple[0] == "EditInstrumentGeometry" + assert editInstrumentGeometryTuple[2]["Workspace"] == groceries["inputWorkspace"] + + def test_cook(self): + utensils = Utensils() + mockSnapper = mock.Mock() + utensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=utensils) + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock(), "outputWorkspace": mock.Mock()} + + output = recipe.cook(ingredients, groceries) + + assert output == groceries["outputWorkspace"] + + assert mockSnapper.executeQueue.called + mockSnapper.CloneWorkspace.assert_called_once_with( + "Clone workspace for reduced instrument", + OutputWorkspace=groceries["outputWorkspace"], + InputWorkspace=groceries["inputWorkspace"] + ) + mockSnapper.EditInstrumentGeometry.assert_called_once_with( + f"Editing instrument geometry for grouping '{ingredients.unmaskedPixelGroup.focusGroup.name}'", + Workspace=groceries["outputWorkspace"], + L2=np.rad2deg(ingredients.unmaskedPixelGroup.L2), + Polar=np.rad2deg(ingredients.unmaskedPixelGroup.twoTheta), + Azimuthal=np.rad2deg(ingredients.unmaskedPixelGroup.azimuth), + InstrumentName=f"SNAP_{ingredients.unmaskedPixelGroup.focusGroup.name}" + ) + + def test_cook_default(self): + utensils = Utensils() + mockSnapper = mock.Mock() + utensils.mantidSnapper = mockSnapper + recipe = EffectiveInstrumentRecipe(utensils=utensils) + ingredients = self.ingredients + groceries = {"inputWorkspace": mock.Mock()} + + output = recipe.cook(ingredients, groceries) + + assert output == groceries["inputWorkspace"] + + assert mockSnapper.executeQueue.called + 
mockSnapper.CloneWorkspace.assert_not_called()
+        mockSnapper.EditInstrumentGeometry.assert_called_once_with(
+            f"Editing instrument geometry for grouping '{ingredients.unmaskedPixelGroup.focusGroup.name}'",
+            Workspace=groceries["inputWorkspace"],
+            L2=np.rad2deg(ingredients.unmaskedPixelGroup.L2),
+            Polar=np.rad2deg(ingredients.unmaskedPixelGroup.twoTheta),
+            Azimuthal=np.rad2deg(ingredients.unmaskedPixelGroup.azimuth),
+            InstrumentName=f"SNAP_{ingredients.unmaskedPixelGroup.focusGroup.name}"
+        )
+
+    def test_cater(self):
+        utensils = Utensils()
+        mockSnapper = mock.Mock()
+        utensils.mantidSnapper = mockSnapper
+        recipe = EffectiveInstrumentRecipe(utensils=utensils)
+        ingredientss = self.ingredientss
+
+        groceriess = [{"inputWorkspace": mock.Mock()}, {"inputWorkspace": mock.Mock()}]
+
+        recipe.cater(zip(ingredientss, groceriess))
+
+        assert mockSnapper.EditInstrumentGeometry.call_count == 2
+        mockSnapper.EditInstrumentGeometry.assert_any_call(
+            f"Editing instrument geometry for grouping '{ingredientss[0].unmaskedPixelGroup.focusGroup.name}'",
+            Workspace=groceriess[0]["inputWorkspace"],
+            L2=np.rad2deg(ingredientss[0].unmaskedPixelGroup.L2),
+            Polar=np.rad2deg(ingredientss[0].unmaskedPixelGroup.twoTheta),
+            Azimuthal=np.rad2deg(ingredientss[0].unmaskedPixelGroup.azimuth),
+            InstrumentName=f"SNAP_{ingredientss[0].unmaskedPixelGroup.focusGroup.name}"
+        )
+        mockSnapper.EditInstrumentGeometry.assert_any_call(
+            f"Editing instrument geometry for grouping '{ingredientss[1].unmaskedPixelGroup.focusGroup.name}'",
+            Workspace=groceriess[1]["inputWorkspace"],
+            L2=np.rad2deg(ingredientss[1].unmaskedPixelGroup.L2),
+            Polar=np.rad2deg(ingredientss[1].unmaskedPixelGroup.twoTheta),
+            Azimuthal=np.rad2deg(ingredientss[1].unmaskedPixelGroup.azimuth),
+            InstrumentName=f"SNAP_{ingredientss[1].unmaskedPixelGroup.focusGroup.name}"
+        )
+
diff --git a/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py b/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py
index 08f97c4be..6de7e038b 100644
--- a/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py
+++ b/tests/unit/backend/recipe/test_PreprocessReductionRecipe.py
@@ -1,5 +1,3 @@
-import unittest
-
 from mantid.simpleapi import (
     CreateEmptyTableWorkspace,
     CreateSampleWorkspace,
@@ -7,20 +5,22 @@
     mtd,
 )
 from snapred.backend.recipe.algorithm.Utensils import Utensils
-from snapred.backend.recipe.PreprocessReductionRecipe import Ingredients, PreprocessReductionRecipe
+from snapred.backend.recipe.PreprocessReductionRecipe import PreprocessReductionRecipe
+from snapred.backend.dao.ingredients import PreprocessReductionIngredients as Ingredients
 from snapred.meta.Config import Resource
 from util.helpers import createCompatibleMask
 from util.SculleryBoy import SculleryBoy
 
+import unittest
 
 class PreprocessReductionRecipeTest(unittest.TestCase):
     fakeInstrumentFilePath = Resource.getPath("inputs/testInstrument/fakeSNAP_Definition.xml")
     sculleryBoy = SculleryBoy()
 
     def _make_groceries(self):
-        sampleWS = mtd.unique_name(prefix="test_applynorm")
-        calibWS = mtd.unique_name(prefix="test_applynorm")
-        maskWS = mtd.unique_name(prefix="test_applynorm")
+        sampleWS = mtd.unique_name(prefix="test_preprocess_reduction")
+        calibWS = mtd.unique_name(prefix="test_preprocess_reduction")
+        maskWS = mtd.unique_name(prefix="test_preprocess_reduction")
 
         # Create sample workspace:
         # * warning: `createCompatibleMask` does not work correctly with
@@ -97,10 +97,10 @@ def test_queueAlgos(self):
         assert applyDiffCalTuple[2]["CalibrationWorkspace"] == 
groceries["diffcalWorkspace"]
 
     def test_cook(self):
-        untensils = Utensils()
+        utensils = Utensils()
         mockSnapper = unittest.mock.Mock()
-        untensils.mantidSnapper = mockSnapper
-        recipe = PreprocessReductionRecipe(utensils=untensils)
+        utensils.mantidSnapper = mockSnapper
+        recipe = PreprocessReductionRecipe(utensils=utensils)
         ingredients = Ingredients()
         groceries = self._make_groceries()
         del groceries["maskWorkspace"]
diff --git a/tests/unit/backend/recipe/test_ReductionRecipe.py b/tests/unit/backend/recipe/test_ReductionRecipe.py
index e7f543c6a..a668bc06b 100644
--- a/tests/unit/backend/recipe/test_ReductionRecipe.py
+++ b/tests/unit/backend/recipe/test_ReductionRecipe.py
@@ -1,11 +1,10 @@
 import time
-from unittest import TestCase, mock
 
-import pytest
 from mantid.simpleapi import CreateSingleValuedWorkspace, mtd
 from snapred.backend.dao.ingredients import ReductionIngredients
 from snapred.backend.recipe.ReductionRecipe import (
     ApplyNormalizationRecipe,
+    EffectiveInstrumentRecipe,
     GenerateFocussedVanadiumRecipe,
     PreprocessReductionRecipe,
     ReductionGroupProcessingRecipe,
@@ -14,6 +13,8 @@
 from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceNameGenerator as wng
 from util.SculleryBoy import SculleryBoy
 
+from unittest import TestCase, mock
+import pytest
 
 class ReductionRecipeTest(TestCase):
     sculleryBoy = SculleryBoy()
@@ -397,6 +398,9 @@ def test_execute(self, mockMtd):
         recipe.ingredients.applyNormalization = mock.Mock(
             return_value=lambda groupingIndex: f"applyNormalization_{groupingIndex}"
         )
+        recipe.ingredients.effectiveInstrument = mock.Mock(
+            return_value=lambda groupingIndex: f"unmaskedPixelGroup_{groupingIndex}"
+        )
 
         # Mock internal methods of recipe
         recipe._applyRecipe = mock.Mock()
@@ -462,13 +466,26 @@ def test_execute(self, mockMtd):
             normalizationWorkspace="norm_grouped",
         )
 
-        artNormCalls = recipe._prepareArtificialNormalization.call_args_list
-        anCall1 = artNormCalls[0]
-        anCall2 = artNormCalls[1]
-        assert anCall1[0][0] == "sample_grouped"
-        assert anCall2[0][0] == "sample_grouped"
-        assert anCall1[0][1] == 0
-        assert anCall2[0][1] == 1
+        assert recipe._prepareArtificialNormalization.call_count == 2
+        recipe._prepareArtificialNormalization.assert_any_call(
+            "sample_grouped",
+            0
+        )
+        recipe._prepareArtificialNormalization.assert_any_call(
+            "sample_grouped",
+            1
+        )
+
+        recipe._applyRecipe.assert_any_call(
+            EffectiveInstrumentRecipe,
+            recipe.ingredients.effectiveInstrument(0),
+            inputWorkspace="sample_grouped",
+        )
+        recipe._applyRecipe.assert_any_call(
+            EffectiveInstrumentRecipe,
+            recipe.ingredients.effectiveInstrument(1),
+            inputWorkspace="sample_grouped",
+        )
 
         recipe._deleteWorkspace.assert_called_with("norm_grouped")
         assert recipe._deleteWorkspace.call_count == len(recipe._prepGroupingWorkspaces.return_value)
diff --git a/tests/unit/backend/service/test_ReductionService.py b/tests/unit/backend/service/test_ReductionService.py
index 729359f28..271d8dd57 100644
--- a/tests/unit/backend/service/test_ReductionService.py
+++ b/tests/unit/backend/service/test_ReductionService.py
@@ -12,10 +12,12 @@
 )
 from snapred.backend.api.RequestScheduler import RequestScheduler
 from snapred.backend.dao import WorkspaceMetadata
+from snapred.backend.dao.ingredients import ArtificialNormalizationIngredients
 from snapred.backend.dao.ingredients.ReductionIngredients import ReductionIngredients
 from snapred.backend.dao.reduction.ReductionRecord import ReductionRecord
 from snapred.backend.dao.request import (
     CreateArtificialNormalizationRequest,
+    FarmFreshIngredients,
ReductionExportRequest, ReductionRequest, ) @@ -74,7 +76,10 @@ def setUp(self): timestamp=self.instance.getUniqueTimestamp(), versions=(1, 2), pixelMasks=[], + keepUnfocused=True, + convertUnitsTo="TOF", focusGroups=[FocusGroup(name="apple", definition="path/to/grouping")], + artificialNormalizationIngredients=mock.Mock(spec=ArtificialNormalizationIngredients) ) def test_name(self): @@ -101,10 +106,22 @@ def test_fetchReductionGroupings(self): def test_prepReductionIngredients(self): # Call the method with the provided parameters - res = self.instance.prepReductionIngredients(self.request) - - assert ReductionIngredients.model_validate(res) - assert res == self.instance.sousChef.prepReductionIngredients(self.request) + result = self.instance.prepReductionIngredients(self.request) + + farmFresh = FarmFreshIngredients( + runNumber=self.request.runNumber, + useLiteMode=self.request.useLiteMode, + timestamp=self.request.timestamp, + focusGroups=self.request.focusGroups, + keepUnfocused=self.request.keepUnfocused, + convertUnitsTo=self.request.convertUnitsTo, + versions=self.request.versions, + ) + expected = self.instance.sousChef.prepReductionIngredients(farmFresh) + expected.artificialNormalizationIngredients = self.request.artificialNormalizationIngredients + + assert ReductionIngredients.model_validate(result) + assert result == expected def test_fetchReductionGroceries(self): self.instance.dataFactoryService.getThisOrLatestCalibrationVersion = mock.Mock(return_value=1) @@ -140,6 +157,64 @@ def test_reduction(self, mockReductionRecipe): mockReductionRecipe.return_value.cook.assert_called_once_with(ingredients, groceries) assert result.record.workspaceNames == mockReductionRecipe.return_value.cook.return_value["outputs"] + @mock.patch(thisService + "ReductionResponse") + @mock.patch(thisService + "ReductionRecipe") + def test_reduction_full_sequence(self, mockReductionRecipe, mockReductionResponse): + mockReductionRecipe.return_value = mock.Mock() + mockResult = { + "result": True, + "outputs": ["one", "two", "three"], + "unfocusedWS": mock.Mock() + } + mockReductionRecipe.return_value.cook = mock.Mock(return_value=mockResult) + self.instance.dataFactoryService.getThisOrLatestCalibrationVersion = mock.Mock(return_value=1) + self.instance.dataFactoryService.stateExists = mock.Mock(return_value=True) + self.instance.dataFactoryService.calibrationExists = mock.Mock(return_value=True) + self.instance.dataFactoryService.getThisOrLatestNormalizationVersion = mock.Mock(return_value=1) + self.instance.dataFactoryService.normalizationExists = mock.Mock(return_value=True) + self.instance._markWorkspaceMetadata = mock.Mock() + + self.instance.fetchReductionGroupings = mock.Mock( + return_value={ + "focusGroups": mock.Mock(), + "groupingWorkspaces": mock.Mock() + } + ) + self.instance.fetchReductionGroceries = mock.Mock( + return_value={ + "combinedPixelMask": mock.Mock() + } + ) + self.instance.prepReductionIngredients = mock.Mock( + return_value=mock.Mock() + ) + self.instance._createReductionRecord = mock.Mock( + return_value=mock.Mock() + ) + + request_ = self.request.model_copy() + self.instance.reduction(request_) + + self.instance.fetchReductionGroupings.assert_called_once_with(request_) + assert request_.focusGroups == self.instance.fetchReductionGroupings.return_value["focusGroups"] + self.instance.fetchReductionGroceries.assert_called_once_with(request_) + self.instance.prepReductionIngredients.assert_called_once_with( + request_, + 
self.instance.fetchReductionGroceries.return_value["combinedPixelMask"] + ) + assert self.instance.fetchReductionGroceries.return_value["groupingWorkspaces"] ==\ + self.instance.fetchReductionGroupings.return_value["groupingWorkspaces"] + + self.instance._createReductionRecord.assert_called_once_with( + request_, + self.instance.prepReductionIngredients.return_value, + mockReductionRecipe.return_value.cook.return_value["outputs"] + ) + mockReductionResponse.assert_called_once_with( + record=self.instance._createReductionRecord.return_value, + unfocusedData=mockReductionRecipe.return_value.cook.return_value["unfocusedWS"] + ) + def test_reduction_noState_withWritePerms(self): mockRequest = mock.Mock() self.instance.dataFactoryService.stateExists = mock.Mock(return_value=False) @@ -644,7 +719,7 @@ def trackFetchGroceryDict(*args, **kwargs): request.runNumber, request.versions.normalization ).useLiteMode(request.useLiteMode).add() loadableOtherGroceryItems = groceryClerk.buildDict() - residentOtherGroceryKwargs = {"maskWorkspace": combinedMaskName} + residentOtherGroceryKwargs = {"combinedPixelMask": combinedMaskName} self.service.fetchReductionGroceries(request) diff --git a/tests/unit/backend/service/test_SousChef.py b/tests/unit/backend/service/test_SousChef.py index efa9d71d2..daca80271 100644 --- a/tests/unit/backend/service/test_SousChef.py +++ b/tests/unit/backend/service/test_SousChef.py @@ -25,6 +25,7 @@ def setUp(self): cifPath="path/to/cif", maxChiSq=100.0, ) + self.pixelMask = mock.Mock() def tearDown(self): del self.instance @@ -58,9 +59,9 @@ def test_prepManyDetectorPeaks_no_calibration(self): def test_prepManyPixelGroups(self): self.instance.prepPixelGroup = mock.Mock() - res = self.instance.prepManyPixelGroups(self.ingredients) + res = self.instance.prepManyPixelGroups(self.ingredients, self.pixelMask) assert res[0] == self.instance.prepPixelGroup.return_value - self.instance.prepPixelGroup.assert_called_once_with(self.ingredients) + self.instance.prepPixelGroup.assert_called_once_with(self.ingredients, self.pixelMask) def test_prepFocusGroup_exists(self): # create a temp file to be used a the path for the focus group @@ -171,7 +172,10 @@ def test_prepPixelGroup_nocache( ): self.instance = SousChef() self.instance.dataFactoryService.calibrationExists = mock.Mock(return_value=True) - key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name) + + # Warning: key now includes pixel mask name. 
+ key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name, None) + # ensure there is no cached value assert self.instance._pixelGroupCache == {} @@ -202,17 +206,17 @@ def test_prepPixelGroup_nocache( @mock.patch(thisService + "PixelGroupingParametersCalculationRecipe") def test_prepPixelGroup_cache(self, PixelGroupingParametersCalculationRecipe): - key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name) + key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name, self.pixelMask) # ensure the cache is prepared self.instance._pixelGroupCache[key] = mock.sentinel.pixel - res = self.instance.prepPixelGroup(self.ingredients) + res = self.instance.prepPixelGroup(self.ingredients, self.pixelMask) assert not PixelGroupingParametersCalculationRecipe.called assert res == self.instance._pixelGroupCache[key] def test_prepPixelGroup_cache_not_altered(self): - key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name) + key = (self.ingredients.runNumber, self.ingredients.useLiteMode, self.ingredients.focusGroup.name, None) # ensure the cache is prepared self.instance._pixelGroupCache[key] = PixelGroup.construct(timeOfFlight={"minimum": 0}) @@ -439,49 +443,57 @@ def test_prepDetectorPeaks_cache_not_altered(self): @mock.patch(thisService + "ReductionIngredients") def test_prepReductionIngredients(self, ReductionIngredients, mockOS): # noqa: ARG002 calibrationCalibrantSamplePath = "a/sample.x" - record = mock.Mock( - smoothingParamter=1.0, + calibrationRecord = mock.Mock( + smoothingParameter=mock.Mock(), calculationParameters=mock.Mock( calibrantSamplePath=calibrationCalibrantSamplePath, ), - calibrantSamplePath=calibrationCalibrantSamplePath, ) - normalRecord = mock.Mock( - smoothingParamter=1.0, - calculationParameters=mock.Mock( - calibrantSamplePath=calibrationCalibrantSamplePath, - ), - normalizationCalibrantSamplePath=calibrationCalibrantSamplePath, + normalizationCalibrantSamplePath = "b/sample.x" + normalizationRecord = mock.Mock( + smoothingParameter=mock.Mock(), + calculationParameters=mock.Mock(), + normalizationCalibrantSamplePath=normalizationCalibrantSamplePath, ) self.instance.prepCalibrantSample = mock.Mock() self.instance.prepRunConfig = mock.Mock() - self.instance.prepManyPixelGroups = mock.Mock() + prepPixelGroupsReturnValues = [mock.Mock(), mock.Mock()] + self.instance.prepManyPixelGroups = mock.Mock(side_effect=prepPixelGroupsReturnValues) self.instance.prepManyDetectorPeaks = mock.Mock() + self.instance._getThresholdFromCalibrantSample = mock.Mock(return_value=mock.Mock()) self.instance.dataFactoryService.getCifFilePath = mock.Mock() self.instance.dataFactoryService.getReductionState = mock.Mock() - self.instance.dataFactoryService.getNormalizationRecord = mock.Mock(return_value=normalRecord) - self.instance.dataFactoryService.getCalibrationRecord = mock.Mock(return_value=record) + self.instance.dataFactoryService.getCalibrationRecord = mock.Mock(return_value=calibrationRecord) + self.instance.dataFactoryService.getNormalizationRecord = mock.Mock(return_value=normalizationRecord) + # Modifications to a copy of `ingredients` during the first part of `prepReductionIngredients`, + # before the `prepManyPixelGroups` calls: ingredients_ = self.ingredients.model_copy() # ... 
from calibration record: - ingredients_.calibrantSamplePath = calibrationCalibrantSamplePath ingredients_.cifPath = self.instance.dataFactoryService.getCifFilePath.return_value # ... from normalization record: - ingredients_.peakIntensityThreshold = self.instance._getThresholdFromCalibrantSample( - "calibrationCalibrantSamplePath" - ) - result = self.instance.prepReductionIngredients(ingredients_) - - self.instance.prepManyPixelGroups.assert_called_once_with(ingredients_) + ingredients_.calibrantSamplePath = normalizationCalibrantSamplePath + + combinedMask = mock.Mock() + # Note that `prepReductionIngredients` is called with the _unmodified_ ingredients. + result = self.instance.prepReductionIngredients(self.ingredients, combinedMask) + + assert self.instance.prepManyPixelGroups.call_count == 2 + + self.instance.prepManyPixelGroups.assert_any_call(ingredients_) + self.instance.prepManyPixelGroups.assert_any_call(ingredients_, combinedMask) + self.instance.dataFactoryService.getCifFilePath.assert_called_once_with("sample") + ReductionIngredients.assert_called_once_with( runNumber=ingredients_.runNumber, useLiteMode=ingredients_.useLiteMode, timestamp=ingredients_.timestamp, - pixelGroups=self.instance.prepManyPixelGroups.return_value, - smoothingParameter=normalRecord.smoothingParameter, + pixelGroups=prepPixelGroupsReturnValues[0], + unmaskedPixelGroups=prepPixelGroupsReturnValues[1], + smoothingParameter=normalizationRecord.smoothingParameter, calibrantSamplePath=ingredients_.calibrantSamplePath, - peakIntensityThreshold=ingredients_.peakIntensityThreshold, + peakIntensityThreshold=self.instance._getThresholdFromCalibrantSample.return_value, detectorPeaksMany=self.instance.prepManyDetectorPeaks.return_value, keepUnfocused=ingredients_.keepUnfocused, convertUnitsTo=ingredients_.convertUnitsTo, diff --git a/tests/unit/ui/workflow/test_DiffCalWorkflow.py b/tests/unit/ui/workflow/test_DiffCalWorkflow.py index c58a09077..6db0b3501 100644 --- a/tests/unit/ui/workflow/test_DiffCalWorkflow.py +++ b/tests/unit/ui/workflow/test_DiffCalWorkflow.py @@ -1,6 +1,7 @@ -import threading from random import randint -from unittest.mock import MagicMock, patch + +from qtpy.QtCore import Qt +from qtpy.QtWidgets import QApplication, QMessageBox from mantid.simpleapi import ( CreateSingleValuedWorkspace, @@ -8,12 +9,11 @@ GroupWorkspaces, mtd, ) -from qtpy.QtCore import Qt -from qtpy.QtWidgets import QApplication, QMessageBox from snapred.meta.mantid.FitPeaksOutput import FIT_PEAK_DIAG_SUFFIX, FitOutputEnum from snapred.meta.pointer import create_pointer from snapred.ui.workflow.DiffCalWorkflow import DiffCalWorkflow +from unittest.mock import MagicMock, patch @patch("snapred.ui.workflow.DiffCalWorkflow.WorkflowImplementer.request") def test_purge_bad_peaks(workflowRequest, qtbot): # noqa: ARG001 @@ -135,16 +135,29 @@ def test_purge_bad_peaks_too_few(workflowRequest, qtbot): # noqa: ARG001 ) diffcalWorkflow.fitPeaksDiagnostic = diagWS - def execute_click(): - w = QApplication.activeWindow() - if isinstance(w, QMessageBox): - close_button = w.button(QMessageBox.Ok) - qtbot.mouseClick(close_button, Qt.LeftButton) - # setup the qtbot to intercept the window qtbot.addWidget(diffcalWorkflow._tweakPeakView) - threading.Timer(0.1, execute_click).start() + + # + # Using a mock here bypasses the following issues: + # + # * which thread the messagebox will be running on (may cause a segfault); + # + # * how long to wait for the messagebox to instantiate. 
+    #
+    def _tooFewPeaksQuery(_parent, title, text, _buttons):
+        if title == "Too Few Peaks":
+            return QMessageBox.Ok
+        raise RuntimeError(f"unexpected `QMessageBox.critical`: title: {title}, text: {text}")
+
+    mockTooFewPeaksQuery = patch("qtpy.QtWidgets.QMessageBox.critical", _tooFewPeaksQuery)
+
+    # Use `start` and `stop`, rather than `with patch(...)`, so that the mock stays applied even if an exception is raised.
+    mockTooFewPeaksQuery.start()
     diffcalWorkflow.purgeBadPeaks(maxChiSq)
+
+    # Remember to remove the mock.
+    mockTooFewPeaksQuery.stop()
 
     assert diffcalWorkflow.ingredients.groupedPeakLists[0].peaks == peaks
     assert diffcalWorkflow.ingredients.groupedPeakLists[0].peaks != good_peaks
diff --git a/tests/util/SculleryBoy.py b/tests/util/SculleryBoy.py
index 99752c50a..ec485379c 100644
--- a/tests/util/SculleryBoy.py
+++ b/tests/util/SculleryBoy.py
@@ -1,4 +1,4 @@
-from typing import Dict, List
+from typing import Dict, List, Optional
 from unittest import mock
 
 import pydantic
@@ -15,6 +15,7 @@
 from snapred.backend.dao.state.PixelGroup import PixelGroup
 from snapred.backend.dao.state.PixelGroupingParameters import PixelGroupingParameters
 from snapred.backend.recipe.GenericRecipe import DetectorPeakPredictorRecipe
+from snapred.meta.mantid.WorkspaceNameGenerator import WorkspaceName
 from snapred.meta.Config import Resource
 from snapred.meta.redantic import parse_file_as
 from util.dao import DAOFactory
@@ -91,7 +92,7 @@ def prepDetectorPeaks(self, ingredients: FarmFreshIngredients, purgePeaks=False)
         except (TypeError, AttributeError):
             return [mock.Mock(spec_set=GroupPeakList)]
 
-    def prepReductionIngredients(self, ingredients: FarmFreshIngredients):  # noqa ARG002
+    def prepReductionIngredients(self, _ingredients: FarmFreshIngredients, _combinedPixelMask: Optional[WorkspaceName] = None):
         path = Resource.getPath("/inputs/calibration/ReductionIngredients.json")
         return parse_file_as(ReductionIngredients, path)