From 2dcdad7c38a6bc5a25d22ef1b13c012df1e01a98 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Sun, 19 May 2024 17:02:20 +0100 Subject: [PATCH 01/53] First step in moving to compatible XML-YML transition status --- deprecated/HardwareObjects/Camera.py | 12 +- deprecated/HardwareObjects/NamedState.py | 4 +- docs/source/dev/configuration_files.md | 4 +- mxcubecore/BaseHardwareObjects.py | 316 ++++++++++-------- mxcubecore/CommandContainer.py | 81 ++--- mxcubecore/HardwareObjectFileParser.py | 4 +- .../HardwareObjects/ALBA/ALBAEpsActuator.py | 2 +- mxcubecore/HardwareObjects/Beamline.py | 106 +----- .../HardwareObjects/BlissHutchTrigger.py | 2 +- mxcubecore/HardwareObjects/Camera.py | 12 +- .../HardwareObjects/DESY/P11SampleChanger.py | 6 +- .../HardwareObjects/ESRF/BlissHutchTrigger.py | 4 +- .../HardwareObjects/ESRF/ID232HutchTrigger.py | 4 +- .../HardwareObjects/ESRF/ID29HutchTrigger.py | 2 +- mxcubecore/HardwareObjects/ESRF/ID30Cryo.py | 2 +- .../HardwareObjects/ESRF/ID30HutchTrigger.py | 2 +- mxcubecore/HardwareObjects/ESRF/ID30Light.py | 2 +- .../HardwareObjects/GrobSampleChanger.py | 6 +- .../HardwareObjects/MAXIV/BIOMAXAperture.py | 2 +- mxcubecore/HardwareObjects/MD2Motor.py | 4 +- .../HardwareObjects/MicrodiffSamplePseudo.py | 2 +- mxcubecore/HardwareObjects/MiniDiff.py | 15 +- mxcubecore/HardwareObjects/QueueModel.py | 4 +- mxcubecore/HardwareObjects/RobodiffLight.py | 2 +- .../HardwareObjects/SOLEIL/PX1/PX1BeamInfo.py | 4 +- .../SOLEIL/PX1/PX1DetectorDistance.py | 2 +- .../HardwareObjects/SOLEIL/PX1/PX1Energy.py | 8 +- .../HardwareObjects/SOLEIL/PX1/PX1MiniDiff.py | 4 +- .../SOLEIL/PX1/PX1Resolution.py | 2 +- .../HardwareObjects/SOLEIL/PX2/PX2BeamInfo.py | 8 +- .../HardwareObjects/SOLEIL/SOLEILPss.py | 4 +- .../HardwareObjects/SOLEIL/TangoDCMotor.py | 4 +- mxcubecore/HardwareObjects/SardanaMotor.py | 2 +- mxcubecore/HardwareObjects/TangoMotor.py | 8 +- mxcubecore/HardwareObjects/XMLRPCServer.py | 15 +- .../abstract/AbstractVideoDevice.py | 4 +- .../mockup/SampleChangerMockup.py | 6 +- mxcubecore/HardwareRepository.py | 82 +++-- mxcubecore/queue_entry/characterisation.py | 10 +- mxcubecore/queue_entry/data_collection.py | 4 +- test/pytest/test_base_hardware_objects.py | 87 +---- 41 files changed, 358 insertions(+), 496 deletions(-) diff --git a/deprecated/HardwareObjects/Camera.py b/deprecated/HardwareObjects/Camera.py index b6dc01b3b7..57f27adb4a 100644 --- a/deprecated/HardwareObjects/Camera.py +++ b/deprecated/HardwareObjects/Camera.py @@ -482,7 +482,7 @@ def takeSnapshot(self, *args, **kwargs): # img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: if len(args): @@ -490,7 +490,7 @@ def takeSnapshot(self, *args, **kwargs): img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: return True @@ -499,7 +499,7 @@ def takeSnapshot(self, *args, **kwargs): else: logging.getLogger("HWR").error( "%s: could not take snapshot: sorry PIL is not available :-(", - self.name(), + self.id, ) return False @@ -833,7 +833,7 @@ def takeSnapshot(self, *args): # img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: if len(args): @@ -841,7 +841,7 @@ def takeSnapshot(self, *args): img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", 
self.name() + "%s: could not save snapshot", self.id ) else: return True @@ -850,7 +850,7 @@ def takeSnapshot(self, *args): else: logging.getLogger("HWR").error( "%s: could not take snapshot: sorry PIL is not available :-(", - self.name(), + self.id, ) return False diff --git a/deprecated/HardwareObjects/NamedState.py b/deprecated/HardwareObjects/NamedState.py index f442a766a2..5ab735fd23 100644 --- a/deprecated/HardwareObjects/NamedState.py +++ b/deprecated/HardwareObjects/NamedState.py @@ -65,13 +65,13 @@ def connect_notify(self, signal): self.emit(signal, (self.get_state(),)) def stateChanged(self, channelValue): - logging.info("hw NamedState %s. got new value %s" % (self.name(), channelValue)) + logging.info("hw NamedState %s. got new value %s" % (self.id, channelValue)) self.set_is_ready(True) self.emit("stateChanged", (self.get_state(),)) def hardwareStateChanged(self, channelValue): logging.info( - "hw NamedState %s. Hardware state is now %s" % (self.name(), channelValue) + "hw NamedState %s. Hardware state is now %s" % (self.id, channelValue) ) self.hdw_state = channelValue self.emit("hardwareStateChanged", (self.hdw_state,)) diff --git a/docs/source/dev/configuration_files.md b/docs/source/dev/configuration_files.md index 10c5b1f193..677b17c496 100644 --- a/docs/source/dev/configuration_files.md +++ b/docs/source/dev/configuration_files.md @@ -112,11 +112,11 @@ You can get and set the values of simple properties by normal `obj.attr` syntax, which will also get you normal, non-property attributes. The `get_properties` method returns a dictionary of all properties and their values, and the `get_property` method behaves as `get_properties().get`. -Direct setting of properties internally calls the `set_property` function, +Direct setting of properties internally calls the `_set_property` function, and this function automatically converts strings to `int`, `float` or `bool` if possible. There are additional ways of accessing contained objects. -`get_objects` and `has_object` take as input the object name +`_get_objects` and `has_object` take as input the object name As currently coded (was it always thus?) the name is equal to the role name used to add the object. An XML-configured object is also coded to mimic a Python list and dictionary of contained objects, so that `anObject[ii]` diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index ee979a1649..11d7f0b75b 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -25,7 +25,6 @@ import logging import typing import warnings -from collections import OrderedDict from typing import ( TYPE_CHECKING, Any, @@ -35,9 +34,6 @@ Iterator, List, Optional, -) -from typing import OrderedDict as TOrderedDict -from typing import ( Tuple, Type, Union, @@ -91,26 +87,61 @@ class DefaultSpecificState(enum.Enum): class ConfiguredObject: """Superclass for classes that take configuration from YAML files""" - # Roles of defined objects and the category they belong to - # NB the double underscore is deliberate - attribute must be hidden from subclasses - __content_roles: List[str] = [] - - # Procedure names - placeholder. 
-    # Will be replaced by a set in any subclasses that can contain procedures
-    # Note that _procedure_names may *not* be set if it is already set in a superclass
-    _procedure_names: Optional[List[str]] = None
+    class HOConfig(pydantic.BaseModel):
+        model_config = pydantic.ConfigDict(extra="allow")

-    def __init__(self, name: str) -> None:
+    def __init__(
+        self, name: str, hwobj_container: Optional["ConfiguredObject"] = None
+    ) -> None:
         """
         Args:
-            name (str): Name.
+            name (str): Equal to role name relative to hwobj_container (if applicable)
         """
-        self.name = name
+        self._name = name
+        self._config: Optional["ConfiguredObject.HOConfig"] = None
+        self._hwobj_container: Optional[ConfiguredObject] = hwobj_container

-        self._objects: TOrderedDict[str, Union[object, None]] = OrderedDict(
-            (role, None) for role in self.all_roles
-        )
+    def __getattr__(self, name):
+        return getattr(self._config, name)
+
+    @property
+    def name(self):
+        """HWOBJ name - Equal to role name relative to hwobj_container (if applicable)"""
+        return self._name
+
+    @property
+    def id(self):
+        """Dot-separated role names defining the path from the beamline object to here
+
+        NB beamline.id == ''"""
+        if self._hwobj_container:
+            names, obj = [], self
+            while obj._hwobj_container:
+                names.append(obj.name)
+                obj = obj._hwobj_container
+            return ".".join(reversed(names))
+        else:
+            return ""
+
+    @property
+    def config(self):
+        """Pydantic object with configured parameters, incl. contained HardwareObjects"""
+        return self._config
+
+    @property
+    def hwobj_container(self):
+        """HardwareObject containing this one - None for Beamline"""
+        return self._hwobj_container
+
+    def get_by_id(self, _id: str) -> "ConfiguredObject":
+        result = self
+        for name in _id.split("."):
+            result = getattr(result._config, name)
+            if result is None:
+                break
+        #
+        return result

     def _init(self) -> None:
         """Object initialisation - executed *before* loading contents"""
@@ -120,58 +151,76 @@ def init(self) -> None:
         """Object initialisation - executed *after* loading contents"""
         pass

-    def replace_object(self, role: str, new_object: object) -> None:
-        """Replace already defined Object with a new one - for runtime use
-
-        Args:
-            role (str): Role name of contained Object
-            new_object (object): New contained Object
-
-        Raises:
-            ValueError: If contained object role is unknown.
-        """
-        if role in self._objects:
-            self._objects[role] = new_object
-        else:
-            raise ValueError("Unknown contained Object role: %s" % role)
-
-    # NB this function must be re-implemented in nested subclasses
     @property
-    def all_roles(self) -> Tuple[str]:
-        """Tuple of all content object roles, in definition and loading order
+    def objects_by_role(self) -> dict[str, Union[Self, None]]:
+        """All contained Objects mapped by role (in specification order).
+
+        Includes objects defined in subclasses.

         Returns:
-            Tuple[str]: Content object roles
+            OrderedDict[str, Union[Self, None]]: Contained objects mapped by role.
         """
-        return tuple(self.__content_roles)
+        return dict(
+            tpl
+            for tpl in self._config.model_dump().items()
+            if isinstance(tpl[1], ConfiguredObject)
+        )

-    @property
-    def all_objects_by_role(self) -> TOrderedDict[str, Union[Self, None]]:
-        """All contained Objects mapped by role (in specification order).
+    def get_properties(self) -> dict[str, Any]:
+        """Get configured properties (not roles)"""
+        return dict(
+            tpl
+            for tpl in self._config.model_dump().items()
+            if not isinstance(tpl[1], ConfiguredObject) and tpl[1] is not None
+        )

-        Includes objects defined in subclasses.
+ def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: + """Get property value or contained HardwareObject. + + Args: + name (str): Name + default_value (Optional[Any], optional): Default value. Defaults to None. Returns: - OrderedDict[str, Union[Self, None]]: Contained objects mapped by role. + Any: Property value. """ - return self._objects.copy() + return ( + getattr(self._config, name) + if hasattr(self._config, name) + else default_value + ) - @property - def procedures(self) -> TOrderedDict[str, Self]: - """Procedures attached to this object mapped by name (in specification order). + def get_roles(self) -> List[str]: + """Get hardware object roles. Returns: - OrderedDict[str, Self]: Object procedures. + List[str]: List of hardware object roles. """ - procedure_names = self.__class__._procedure_names - result = OrderedDict() - if procedure_names: - for name in procedure_names: - procedure = getattr(self, name) - if procedure is not None: - result[name] = procedure + warnings.warn( + "%s.get_roles is deprecated. Avoid, or use objects_by_role instead" + % self.__class__.__name__ + ) + return list( + tpl[0] + for tpl in self._config.model_dump().items() + if isinstance(tpl[1], ConfiguredObject) + ) - return result + def print_log( + self, + log_type: str = "HWR", + level: str = "debug", + msg: str = "", + ) -> None: + """Print message to logger. + + Args: + log_type (str, optional): Logger type. Defaults to "HWR". + level (str, optional): Logger level. Defaults to "debug". + msg (str, optional): Message to log. Defaults to "". + """ + if hasattr(logging.getLogger(log_type), level): + getattr(logging.getLogger(log_type), level)(msg) class PropertySet(dict): @@ -224,7 +273,8 @@ def get_changes(self) -> Generator[tuple, None, None]: class HardwareObjectNode: """Hardware Object Node""" - user_file_directory: str + # + # user_file_directory: str def __init__(self, node_name: str) -> None: """ @@ -241,16 +291,18 @@ def __init__(self, node_name: str) -> None: self.__references: List[Tuple[str, str, str, int, int, int]] = [] self._xml_path: Union[str, None] = None - @staticmethod - def set_user_file_directory(user_file_directory: str) -> None: - """Set user file directory. - - Args: - user_file_directory (str): User file directory path. - """ - HardwareObjectNode.user_file_directory = user_file_directory + # + # @staticmethod + # def set_user_file_directory(user_file_directory: str) -> None: + # """Set user file directory. + # + # Args: + # user_file_directory (str): User file directory path. + # """ + # HardwareObjectNode.user_file_directory = user_file_directory - def name(self) -> str: + @property + def load_name(self) -> str: """Get node name. Returns: @@ -258,22 +310,6 @@ def name(self) -> str: """ return self.__name - def set_name(self, name: str) -> None: - """Set node name - - Args: - name (str): Name to set. - """ - self.__name = name - - def get_roles(self) -> List[str]: - """Get hardware object roles. - - Returns: - List[str]: List of hardware object roles. - """ - return list(self._objects_by_role.keys()) - def set_path(self, path: str) -> None: """Set the 'path' of the Hardware Object in the XML file describing it (the path follows the XPath syntax) @@ -281,37 +317,28 @@ def set_path(self, path: str) -> None: Args: path (str): String representing the path of the Hardware Object in its file """ - self._path = path - def get_xml_path(self) -> Union[str, None]: - """Get XML file path. 
+ # NB For use ONLY in loading xml-configured objects - Returns: - Union[str, None]: XML file path. - """ - return self._xml_path + self._path = path def __iter__(self) -> Generator[Union["HardwareObject", None], None, None]: + warnings.warn("%s.__iter__ is Deprecated. Avoid" % self.__class__.__name__) for i in range(len(self.__objects_names)): for object in self.__objects[i]: yield object def __len__(self) -> int: + warnings.warn("%s.__len__ is Deprecated. Avoid" % self.__class__.__name__) return sum(map(len, self.__objects)) - def __getattr__(self, attr: str) -> Any: - if attr.startswith("__"): - raise AttributeError(attr) - - try: - return self.__dict__["_property_set"][attr] - except KeyError: - raise AttributeError(attr) - def __setattr__(self, attr: str, value: Any) -> None: try: - if attr not in self.__dict__ and attr in self._property_set: - self.set_property(attr, value) + if attr not in self.__dict__ and attr in self._config.model_dump(): + warnings.warn( + "%s.__setattr__ is Deprecated. Avoid" % self.__class__.__name__ + ) + self._set_property(attr, value) else: self.__dict__[attr] = value except AttributeError: @@ -321,6 +348,7 @@ def __getitem__( self, key: Union[str, int], ) -> Union["HardwareObject", List[Union["HardwareObject", None]], None]: + warnings.warn("%s.__getitem__ is Deprecated. Avoid" % self.__class__.__name__) if isinstance(key, str): object_name = key @@ -361,6 +389,7 @@ def add_reference( reference (str): Xpath reference. role (Union[str, None], optional): Role. Defaults to None. """ + role = str(role).lower() try: @@ -386,6 +415,8 @@ def resolve_references(self) -> None: # NB Must be here - importing at top level leads to circular imports from .HardwareRepository import get_hardware_repository + # NB For use ONLY in loading xml-configured objects + while len(self.__references) > 0: ( reference, @@ -397,6 +428,11 @@ def resolve_references(self) -> None: ) = self.__references.pop() hw_object = get_hardware_repository().get_hardware_object(reference) + warnings.warn( + "Deprecated: Hardware object '{}' with role '{}' is a reference".format( + name, role + ) + ) if hw_object is not None: self._objects_by_role[role] = hw_object @@ -419,7 +455,7 @@ def resolve_references(self) -> None: for hw_object in self: hw_object.resolve_references() - def add_object( + def _add_object( self, name: str, hw_object: Union["HardwareObject", None], @@ -432,6 +468,9 @@ def add_object( hw_object (Union[HardwareObject, None]): Hardware object. role (Optional[str], optional): Role. Defaults to None. """ + + # NB For use ONLY in loading xml-configured objects + if hw_object is None: return None elif role is not None: @@ -447,18 +486,9 @@ def add_object( else: self.__objects[index].append(hw_object) - def has_object(self, object_name: str) -> bool: - """Check if has hardware object by name. - - Args: - object_name (str): Name. - - Returns: - bool: True if object name in hardware object node, otherwise False. - """ - return object_name in self.__objects_names + # - def get_objects( + def _get_objects( self, object_name: str, ) -> Generator[Union["HardwareObject", None], None, None]: @@ -470,6 +500,9 @@ def get_objects( Yields: Union[HardwareObject, None]: Hardware object. """ + + # NB For use ONLY in loading xml-configured objects + try: index = self.__objects_names.index(object_name) except ValueError: @@ -487,6 +520,10 @@ def get_object_by_role(self, role: str) -> Union["HardwareObject", None]: Returns: Union[HardwareObject, None]: Hardware object. 
""" + warnings.warn( + "%s.get_object_by_role is deprecated. Use get_property instead" + % self.__class__.__name__ + ) role = str(role).lower() objects = [self] @@ -498,7 +535,7 @@ def get_object_by_role(self, role: str) -> Union["HardwareObject", None]: else: return result - def objects_names(self) -> List[Union[str, None]]: + def _objects_names(self) -> List[Union[str, None]]: """Return hardware object names. Returns: @@ -506,13 +543,16 @@ def objects_names(self) -> List[Union[str, None]]: """ return self.__objects_names[:] - def set_property(self, name: str, value: Any) -> None: + def _set_property(self, name: str, value: Any) -> None: """Set property value. Args: name (str): Name. value (Any): Value. """ + + # NB For use ONLY in loading xml-configured objects + name = str(name) value = str(value) @@ -536,41 +576,16 @@ def set_property(self, name: str, value: Any) -> None: self._property_set[name] = value self._property_set.set_property_path(name, self._path + "/" + str(name)) - def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: - """Get property value. - - Args: - name (str): Name - default_value (Optional[Any], optional): Default value. Defaults to None. - - Returns: - Any: Property value. - """ - return self._property_set.get(str(name), default_value) - - def get_properties(self) -> PropertySet: - """Get properties. + def _get_properties(self) -> PropertySet: + """Get properties - for XML-config implementation loading only Returns: PropertySet: Properties. """ - return self._property_set - def print_log( - self, - log_type: str = "HWR", - level: str = "debug", - msg: str = "", - ) -> None: - """Print message to logger. + # NB For use ONLY in loading xml-configured objects - Args: - log_type (str, optional): Logger type. Defaults to "HWR". - level (str, optional): Logger level. Defaults to "debug". - msg (str, optional): Message to log. Defaults to "". - """ - if hasattr(logging.getLogger(log_type), level): - getattr(logging.getLogger(log_type), level)(msg) + return self._property_set class HardwareObjectMixin(CommandContainer): @@ -1004,7 +1019,7 @@ def disconnect(self, signal, slot, sender=None): ... # pass -class HardwareObject(HardwareObjectNode, HardwareObjectMixin): +class HardwareObject(ConfiguredObject, HardwareObjectNode, HardwareObjectMixin): """Xml-configured hardware object""" def __init__(self, rootName: str) -> None: @@ -1038,12 +1053,17 @@ def init(self) -> None: HardwareObjectMixin.init(self) def __getstate__(self) -> str: - return self.name() + + # NBNB Needs updating + + return self.load_name def __setstate__(self, name: str) -> None: # NB Must be here - importing at top level leads to circular imports from .HardwareRepository import get_hardware_repository + # NBNB Needs updating + obj = get_hardware_repository().get_hardware_object(name) self.__dict__.update(obj.__dict__) @@ -1073,7 +1093,7 @@ def get_changes(node: Self) -> Union[list, List[tuple], Any]: if isinstance(node, HardwareObject): if updates: - get_hardware_repository().update(node.name(), updates) + get_hardware_repository().update(node.load_name, updates) return [] else: return updates @@ -1090,7 +1110,7 @@ def rewrite_xml(self, xml: Union[bytes, Any]) -> None: # NB Must be here - importing at top level leads to circular imports from .HardwareRepository import get_hardware_repository - get_hardware_repository().rewrite_xml(self.name(), xml) + get_hardware_repository().rewrite_xml(self.load_name, xml) def xml_source(self) -> Union[str, Any]: """Get XML configuration source. 
@@ -1102,7 +1122,7 @@ def xml_source(self) -> Union[str, Any]: # NB Must be here - importing at top level leads to circular imports from .HardwareRepository import get_hardware_repository - return get_hardware_repository().xml_source[self.name()] + return get_hardware_repository().xml_source[self.load_name] class HardwareObjectYaml(ConfiguredObject, HardwareObjectMixin): diff --git a/mxcubecore/CommandContainer.py b/mxcubecore/CommandContainer.py index 874e83409d..306e660065 100644 --- a/mxcubecore/CommandContainer.py +++ b/mxcubecore/CommandContainer.py @@ -330,7 +330,7 @@ def get_channel_object( """ channel = self.__channels.get(channel_name) if channel is None and not optional: - msg = "%s: Unable to get channel %s" % (self.name(), channel_name) + msg = "%s: Unable to get channel %s" % (self.id, channel_name) logging.getLogger("user_level_log").error(msg) # raise Exception(msg) return channel @@ -396,7 +396,7 @@ def add_channel( except Exception: logging.getLogger().error( "%s: cannot add channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) elif channel_type.lower() == "taco": @@ -413,7 +413,7 @@ def add_channel( except Exception: logging.getLogger().error( "%s: cannot add channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) elif channel_type.lower() == "tango": @@ -430,14 +430,14 @@ def add_channel( except ConnectionError: logging.getLogger().error( "%s: could not connect to device server %s (hint: is it running ?)", - self.name(), + self.id, attributes_dict["tangoname"], ) raise ConnectionError except Exception: logging.getLogger().exception( "%s: cannot add channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) elif channel_type.lower() == "exporter": @@ -459,7 +459,7 @@ def add_channel( except Exception: logging.getLogger().exception( "%s: cannot add exporter channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) elif channel_type.lower() == "epics": @@ -470,7 +470,7 @@ def add_channel( except Exception: logging.getLogger().exception( "%s: cannot add EPICS channel %s (hint: check PV name)", - self.name(), + self.id, channel_name, ) elif channel_type.lower() == "tine": @@ -487,7 +487,7 @@ def add_channel( except Exception: logging.getLogger("HWR").exception( "%s: cannot add TINE channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) @@ -505,7 +505,7 @@ def add_channel( logging.getLogger().debug( "Creating a sardanachannel - %s / %s / %s", - self.name(), + self.id, channel_name, str(attributes_dict), ) @@ -516,7 +516,7 @@ def add_channel( except Exception: logging.getLogger().exception( "%s: cannot add SARDANA channel %s (hint: check PV name)", - self.name(), + self.id, channel_name, ) @@ -534,7 +534,7 @@ def add_channel( except Exception: logging.getLogger("HWR").exception( "%s: cannot add Mockup channel %s (hint: check attributes)", - self.name(), + self.id, channel_name, ) @@ -660,7 +660,7 @@ def add_command( except KeyError as err: logging.getLogger().error( '%s: cannot add command: missing "%s" property', - self.name(), + self.id, err.args[0], ) return @@ -683,7 +683,7 @@ def add_command( except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) elif cmd_type.lower() == "taco": @@ -700,7 +700,7 @@ def add_command( except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, 
cmd_name, ) elif cmd_type.lower() == "tango": @@ -716,14 +716,14 @@ def add_command( except ConnectionError: logging.getLogger().error( "%s: could not connect to device server %s (hint: is it running ?)", - self.name(), + self.id, attributes_dict["tangoname"], ) raise ConnectionError except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) @@ -746,7 +746,7 @@ def add_command( except Exception: logging.getLogger().exception( "%s: cannot add command %s (hint: check attributes)", - self.name(), + self.id, cmd_name, ) elif cmd_type.lower() == "epics": @@ -757,7 +757,7 @@ def add_command( except Exception: logging.getLogger().exception( "%s: cannot add EPICS channel %s (hint: check PV name)", - self.name(), + self.id, cmd_name, ) @@ -808,7 +808,7 @@ def add_command( else: logging.getLogger().error( "%s: incomplete sardana command declaration. ignored", - self.name(), + self.id, ) from mxcubecore.Command.Sardana import ( @@ -822,14 +822,14 @@ def add_command( except ConnectionError: logging.getLogger().error( "%s: could not connect to sardana door %s (hint: is it running ?)", - self.name(), + self.id, attributes_dict["doorname"], ) raise ConnectionError except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) elif cmd_type == "command" and taurusname is not None: @@ -838,19 +838,19 @@ def add_command( except ConnectionError: logging.getLogger().error( "%s: could not connect to sardana device %s (hint: is it running ?)", - self.name(), + self.id, taurusname, ) raise ConnectionError except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) else: logging.getLogger().error( - "%s: incomplete sardana command declaration. ignored", self.name() + "%s: incomplete sardana command declaration. 
ignored", self.id ) elif cmd_type.lower() == "pool": @@ -866,14 +866,14 @@ def add_command( except ConnectionError: logging.getLogger().error( "%s: could not connect to device server %s (hint: is it running ?)", - self.name(), + self.id, attributes_dict["tangoname"], ) raise ConnectionError except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) elif cmd_type.lower() == "tine": @@ -890,7 +890,7 @@ def add_command( except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) @@ -902,7 +902,7 @@ def add_command( except Exception: logging.getLogger().exception( '%s: could not add command "%s" (hint: check command attributes)', - self.name(), + self.id, cmd_name, ) @@ -911,11 +911,11 @@ def add_command( if not isinstance(arg1, dict): i = 1 - for arg in arg1.get_objects("argument"): - on_change = arg.get_property("onchange") + for arg in arg1.get_property("argument"): + on_change = arg.get("onchange") if on_change is not None: on_change = (on_change, weakref.ref(self)) - value_from = arg.get_property("valuefrom") + value_from = arg.get("valuefrom") if value_from is not None: value_from = (value_from, weakref.ref(self)) @@ -924,48 +924,49 @@ def add_command( except IndexError: try: new_command.add_argument( - arg.get_property("name"), - arg.type, + arg.get("name"), + arg["type"], onchange=on_change, valuefrom=value_from, ) except AttributeError: logging.getLogger().error( '%s, command "%s": could not add argument %d, missing type or name', - self.name(), + self.id, cmd_name, i, ) continue else: if isinstance(combo_items, list): - combo_items = [] + ll1 = [] for item in combo_items: name = item.get_property("name") value = item.get_property("value") if name is None or value is None: logging.getLogger().error( "%s, command '%s': could not add argument %d, missing combo item name or value", - self.name(), + self.id, cmd_name, i, ) continue else: - combo_items.append((name, value)) + ll1.append((name, value)) + combo_items = ll1 else: name = combo_items.get_property("name") value = combo_items.get_property("value") if name is None or value is None: - combo_items = ((name, value),) - else: logging.getLogger().error( "%s, command '%s': could not add argument %d, missing combo item name or value", - self.name(), + self.id, cmd_name, i, ) continue + else: + combo_items = ((name, value),) new_command.add_argument( arg.get_property("name"), diff --git a/mxcubecore/HardwareObjectFileParser.py b/mxcubecore/HardwareObjectFileParser.py index 870801d1f8..cc8e7c7785 100644 --- a/mxcubecore/HardwareObjectFileParser.py +++ b/mxcubecore/HardwareObjectFileParser.py @@ -376,13 +376,13 @@ def endElement(self, name): ) elif name == self.property: del self.objects[-1] # remove empty object - self.objects[-1].set_property(name, self.buffer) + self.objects[-1]._set_property(name, self.buffer) else: if len(self.objects) == 1: return if len(self.objects) > 1: - self.objects[-2].add_object( + self.objects[-2]._add_object( name, self.objects[-1], role=self.element_role ) if len(self.objects) > 0: diff --git a/mxcubecore/HardwareObjects/ALBA/ALBAEpsActuator.py b/mxcubecore/HardwareObjects/ALBA/ALBAEpsActuator.py index 1b821476cd..672ee8f37f 100644 --- a/mxcubecore/HardwareObjects/ALBA/ALBAEpsActuator.py +++ b/mxcubecore/HardwareObjects/ALBA/ALBAEpsActuator.py @@ -81,7 +81,7 @@ def init(self): self.actuator_channel.connect_signal("update", 
self.stateChanged) except KeyError: logging.getLogger().warning( - "%s: cannot report EPS Actuator State", self.name() + "%s: cannot report EPS Actuator State", self.id ) try: diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index 686a1f3f09..c654449a0c 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -35,6 +35,7 @@ Any, Union, ) +from warnings import warn from mxcubecore.dispatcher import dispatcher @@ -146,11 +147,6 @@ def __init__(self, name): # 2D-points, (none centred positions) self.enable_2d_points = True - # Dictionary with the python id of hardwareobject as key - # and the "dotted/attribute path" to hardwareobject from the - # Beamline object - self._hardware_object_id_dict = {} - def init(self): """Object initialisation - executed *after* loading contents""" # Validate acquisition parameters @@ -180,76 +176,12 @@ def _hwr_init_done(self): self._hardware_object_id_dict = self._get_id_dict() def get_id(self, ho: HardwareObject) -> str: - """ - Returns "dotted path/attribute" which is unique within the context of - HardwareRepository - - Args: - ho: The hardware object for which to get the id - - Returns: - "dotted path/attribute" - """ - return self._hardware_object_id_dict.get(ho) + warn("Beamline.get_id is Deprecated. Use hwobj.id instead") + return ho.id def get_hardware_object(self, _id: str) -> Union[HardwareObject, None]: - """ - Returns the HardwareObject with the given id - - Args: - _id: "attribute path" / id of HardwareObject - Returns: - HardwareObject with the given id - """ - found_ho = None - - for current_ho, current_id in self._hardware_object_id_dict.items(): - if current_id == _id: - found_ho = current_ho - - return found_ho - - def _get_id_dict(self) -> dict: - """ - Wrapper function used to call the recursive method used to find all - HardwareObjects accessible from the Beamline object. - """ - result = {} - - for ho_name in self.all_roles: - ho = self._objects.get(ho_name) - - if ho: - result[ho] = ho_name - self._get_id_dict_rec(ho, ho_name, result) - - return result - - def _get_id_dict_rec( - self, ho: HardwareObject, _path: str = "", result: dict = {} - ) -> str: - """ - Recurses through all the roles of ho and constructs its corresponding - "dotted path/attribute" - - Args: - ho (HardwareObject): The HardwareObject to get the id for - _path (str): Current path (used in recursion) - result: A dictionary where the key is the id of the HardwareObject - and the value its dotted path. - - Returns: - (str): Dotted path for the given HardwareObject - """ - if hasattr(ho, "get_roles"): - for role in ho.get_roles(): - child_ho = ho.get_object_by_role(role) - if child_ho not in result: - result[child_ho] = self._get_id_dict_rec( - child_ho, f"{_path}.{role}", result - ) - - return _path + warn("Beamline.get_hardware_object is Deprecated. Use get_by_id instead") + return self.get_by_id(_id) # Signal handling functions: def emit(self, signal: Union[str, object, Any], *args) -> None: @@ -280,16 +212,6 @@ def emit(self, signal: Union[str, object, Any], *args) -> None: "Signal %s is not connected" % signal ) - # NB this function must be re-implemented in nested subclasses - @property - def all_roles(self): - """Tuple of all content object roles, indefinition and loading order - - Returns: - tuple[text_str, ...] 
- """ - return super(Beamline, self).all_roles + tuple(self.__content_roles) - @property def machine_info(self): """Machine information Hardware object @@ -913,17 +835,7 @@ def get_default_characterisation_parameters(self): return self.characterisation.get_default_characterisation_parameters() def force_emit_signals(self): - for role in self.all_roles: - hwobj = getattr(self, role) - if hwobj is not None: - try: - hwobj.force_emit_signals() - for attr in dir(hwobj): - if not attr.startswith("_"): - if hasattr(getattr(hwobj, attr), "force_emit_signals"): - child_hwobj = getattr(hwobj, attr) - child_hwobj.force_emit_signals() - except BaseException as ex: - logging.getLogger("HWR").error( - "Unable to call force_emit_signals (%s)" % str(ex) - ) + hwobjs = list(self.objects_by_role().values()) + for hwobj in hwobjs: + hwobj.force_emit_signals() + hwobjs.extend(hwobj.objects_by_role().values()) diff --git a/mxcubecore/HardwareObjects/BlissHutchTrigger.py b/mxcubecore/HardwareObjects/BlissHutchTrigger.py index c6934316f7..d783665f62 100644 --- a/mxcubecore/HardwareObjects/BlissHutchTrigger.py +++ b/mxcubecore/HardwareObjects/BlissHutchTrigger.py @@ -83,7 +83,7 @@ def abort(self): def macro(self, entering_hutch, **kwargs): logging.info( - "%s: %s hutch", self.name(), "entering" if entering_hutch else "leaving" + "%s: %s hutch", self.id, "entering" if entering_hutch else "leaving" ) ctrl_obj = self.get_object_by_role("controller") ctrl_obj.hutch_actions(entering_hutch, hutch_trigger=True, **kwargs) diff --git a/mxcubecore/HardwareObjects/Camera.py b/mxcubecore/HardwareObjects/Camera.py index c58d48dc48..93903cf586 100644 --- a/mxcubecore/HardwareObjects/Camera.py +++ b/mxcubecore/HardwareObjects/Camera.py @@ -482,7 +482,7 @@ def takeSnapshot(self, *args, **kwargs): # img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: if len(args): @@ -490,7 +490,7 @@ def takeSnapshot(self, *args, **kwargs): img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: return True @@ -499,7 +499,7 @@ def takeSnapshot(self, *args, **kwargs): else: logging.getLogger("HWR").error( "%s: could not take snapshot: sorry PIL is not available :-(", - self.name(), + self.id, ) return False @@ -835,7 +835,7 @@ def takeSnapshot(self, *args): # img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: if len(args): @@ -843,7 +843,7 @@ def takeSnapshot(self, *args): img.save(*args) except Exception: logging.getLogger("HWR").exception( - "%s: could not save snapshot", self.name() + "%s: could not save snapshot", self.id ) else: return True @@ -852,7 +852,7 @@ def takeSnapshot(self, *args): else: logging.getLogger("HWR").error( "%s: could not take snapshot: sorry PIL is not available :-(", - self.name(), + self.id, ) return False diff --git a/mxcubecore/HardwareObjects/DESY/P11SampleChanger.py b/mxcubecore/HardwareObjects/DESY/P11SampleChanger.py index 75a9e95f3e..08cb4b0dd2 100644 --- a/mxcubecore/HardwareObjects/DESY/P11SampleChanger.py +++ b/mxcubecore/HardwareObjects/DESY/P11SampleChanger.py @@ -403,9 +403,9 @@ def _init_sc_contents(self): """ named_samples = {} - if self.has_object("test_sample_names"): - for tag, val in self["test_sample_names"].get_properties().items(): - named_samples[val] = tag + dd1 = 
self.get_property("test_sample_names") + if dd1: + named_samples.update(dd1) for basket_index in range(self.no_of_baskets): basket = self.get_components()[basket_index] diff --git a/mxcubecore/HardwareObjects/ESRF/BlissHutchTrigger.py b/mxcubecore/HardwareObjects/ESRF/BlissHutchTrigger.py index a7009d06aa..574f52ed42 100644 --- a/mxcubecore/HardwareObjects/ESRF/BlissHutchTrigger.py +++ b/mxcubecore/HardwareObjects/ESRF/BlissHutchTrigger.py @@ -74,14 +74,14 @@ def init(self): self._proxy = DeviceProxy(tango_device) except DevFailed as _traceback: last_error = _traceback[-1] - msg = f"{self.name()}: {last_error['desc']}" + msg = f"{self.id}: {last_error['desc']}" raise RuntimeError(msg) pss = self.get_property("pss_card_ch") try: self.card, self.channel = map(int, pss.split("/")) except AttributeError: - msg = f"{self.name()}: cannot find PSS number" + msg = f"{self.id}: cannot find PSS number" raise RuntimeError(msg) # polling interval [s] diff --git a/mxcubecore/HardwareObjects/ESRF/ID232HutchTrigger.py b/mxcubecore/HardwareObjects/ESRF/ID232HutchTrigger.py index 4400b2720f..e525d7a123 100644 --- a/mxcubecore/HardwareObjects/ESRF/ID232HutchTrigger.py +++ b/mxcubecore/HardwareObjects/ESRF/ID232HutchTrigger.py @@ -88,14 +88,14 @@ def abort(self): def macro(self, entering_hutch, old={"dtox": None}): logging.info( - "%s: %s hutch", self.name(), "entering" if entering_hutch else "leaving" + "%s: %s hutch", self.id, "entering" if entering_hutch else "leaving" ) dtox = HWR.beamline.detector.distance udiff_ctrl = self.get_object_by_role("predefined") ctrl_obj = self.get_object_by_role("controller") if not entering_hutch: if old["dtox"] is not None: - print("Moving %s to %g" % (dtox.name(), old["dtox"])) + print("Moving %s to %g" % (dtox.id, old["dtox"])) dtox.set_value(old["dtox"]) self.flex_device.eval("flex.user_port(0)") self.flex_device.eval("flex.robot_port(1)") diff --git a/mxcubecore/HardwareObjects/ESRF/ID29HutchTrigger.py b/mxcubecore/HardwareObjects/ESRF/ID29HutchTrigger.py index 893161bd4d..7434aae517 100644 --- a/mxcubecore/HardwareObjects/ESRF/ID29HutchTrigger.py +++ b/mxcubecore/HardwareObjects/ESRF/ID29HutchTrigger.py @@ -65,7 +65,7 @@ def abort(self): def macro(self, entering_hutch, old={"dtox": None}): logging.info( - "%s: %s hutch", self.name(), "entering" if entering_hutch else "leaving" + "%s: %s hutch", self.id, "entering" if entering_hutch else "leaving" ) dtox = HWR.beamline.detector.distance if not entering_hutch: diff --git a/mxcubecore/HardwareObjects/ESRF/ID30Cryo.py b/mxcubecore/HardwareObjects/ESRF/ID30Cryo.py index 7f50468dbd..b607f6ef6c 100644 --- a/mxcubecore/HardwareObjects/ESRF/ID30Cryo.py +++ b/mxcubecore/HardwareObjects/ESRF/ID30Cryo.py @@ -16,7 +16,7 @@ def init(self): controller = self.get_object_by_role("controller") self._state = None - self.username = self.name() + self.username = self.name self.wago_controller = getattr(controller, self.wago) self.command_key = self.get_property("cmd") self.in_key = self.get_property("is_in") diff --git a/mxcubecore/HardwareObjects/ESRF/ID30HutchTrigger.py b/mxcubecore/HardwareObjects/ESRF/ID30HutchTrigger.py index 0cc60455cb..a07fbdb564 100644 --- a/mxcubecore/HardwareObjects/ESRF/ID30HutchTrigger.py +++ b/mxcubecore/HardwareObjects/ESRF/ID30HutchTrigger.py @@ -68,7 +68,7 @@ def abort(self): def macro(self, entering_hutch, old={"dtox": None, "aperture": None}): logging.info( - "%s: %s hutch", self.name(), "entering" if entering_hutch else "leaving" + "%s: %s hutch", self.id, "entering" if entering_hutch else "leaving" ) 
eh_controller = self.get_object_by_role("eh_controller") if not entering_hutch: diff --git a/mxcubecore/HardwareObjects/ESRF/ID30Light.py b/mxcubecore/HardwareObjects/ESRF/ID30Light.py index d53c2a9f1a..4e86669556 100644 --- a/mxcubecore/HardwareObjects/ESRF/ID30Light.py +++ b/mxcubecore/HardwareObjects/ESRF/ID30Light.py @@ -17,7 +17,7 @@ def __init__(self, name): def init(self): controller = self.get_object_by_role("controller") - self.username = self.name() + self.username = self.name self.wago_controller = getattr(controller, self.wago) self.command_key = self.get_property("cmd") self.in_key = self.get_property("is_in") diff --git a/mxcubecore/HardwareObjects/GrobSampleChanger.py b/mxcubecore/HardwareObjects/GrobSampleChanger.py index 3f2cee4a97..28f74fbc25 100644 --- a/mxcubecore/HardwareObjects/GrobSampleChanger.py +++ b/mxcubecore/HardwareObjects/GrobSampleChanger.py @@ -49,7 +49,7 @@ def init(self): self.connect(self.grob, "samples_map", self.samples_map_changed) def connect_notify(self, signal): - logging.info("%s: connect_notify %s", self.name(), signal) + logging.info("%s: connect_notify %s", self.id, signal) if signal == "stateChanged": self.sample_changer_state_changed(self.get_state()) elif signal == "loadedSampleChanged": @@ -99,7 +99,7 @@ def _callSuccessCallback(self): self._successCallback() except Exception: logging.exception( - "%s: exception while calling success callback", self.name() + "%s: exception while calling success callback", self.id ) def _call_failure_callback(self): @@ -108,7 +108,7 @@ def _call_failure_callback(self): self._failureCallback() except Exception: logging.exception( - "%s: exception while calling failure callback", self.name() + "%s: exception while calling failure callback", self.id ) def _sample_transfer_done(self, transfer_greenlet): diff --git a/mxcubecore/HardwareObjects/MAXIV/BIOMAXAperture.py b/mxcubecore/HardwareObjects/MAXIV/BIOMAXAperture.py index 35ccf654bc..8cda310587 100644 --- a/mxcubecore/HardwareObjects/MAXIV/BIOMAXAperture.py +++ b/mxcubecore/HardwareObjects/MAXIV/BIOMAXAperture.py @@ -25,7 +25,7 @@ def init(self): def moveToPosition(self, positionName): logging.getLogger().debug( "%s: trying to move %s to %s:%f", - self.name(), + self.id, self.motor_name, positionName, self.predefinedPositions[positionName], diff --git a/mxcubecore/HardwareObjects/MD2Motor.py b/mxcubecore/HardwareObjects/MD2Motor.py index efc1e428fd..dc8090c64a 100644 --- a/mxcubecore/HardwareObjects/MD2Motor.py +++ b/mxcubecore/HardwareObjects/MD2Motor.py @@ -89,9 +89,7 @@ def updateMotorState(self, motor_states): def motorStateChanged(self, state): logging.getLogger().debug( - "{}: in motorStateChanged: motor state changed to {}".format( - self.name(), state - ) + "{}: in motorStateChanged: motor state changed to {}".format(self.id, state) ) self.emit("stateChanged", (state,)) diff --git a/mxcubecore/HardwareObjects/MicrodiffSamplePseudo.py b/mxcubecore/HardwareObjects/MicrodiffSamplePseudo.py index c3c2a5d25e..3066805fb9 100644 --- a/mxcubecore/HardwareObjects/MicrodiffSamplePseudo.py +++ b/mxcubecore/HardwareObjects/MicrodiffSamplePseudo.py @@ -55,7 +55,7 @@ def updateMotorState(self): def motorStateChanged(self, state): logging.getLogger().debug( - "%s: in motorStateChanged: motor state changed to %s", self.name(), state + "%s: in motorStateChanged: motor state changed to %s", self.id, state ) self.emit("stateChanged", (self.motorState,)) diff --git a/mxcubecore/HardwareObjects/MiniDiff.py b/mxcubecore/HardwareObjects/MiniDiff.py index 
676b1679de..09bb665087 100644 --- a/mxcubecore/HardwareObjects/MiniDiff.py +++ b/mxcubecore/HardwareObjects/MiniDiff.py @@ -379,14 +379,13 @@ def phiyMotorStateChanged(self, state): def getCalibrationData(self, offset): if self.zoomMotor is not None: - if self.zoomMotor.has_object("positions"): - for position in self.zoomMotor["positions"]: - if abs(position.offset - offset) <= self.zoomMotor.delta: - calibrationData = position["calibrationData"] - return ( - float(calibrationData.pixelsPerMmY) or 0, - float(calibrationData.pixelsPerMmZ) or 0, - ) + for position in self.zoomMotor.get_property("positions", ()): + if abs(position["offset"] - offset) <= self.zoomMotor.delta: + calibrationData = position["calibrationData"] + return ( + float(calibrationData.pixelsPerMmY) or 0, + float(calibrationData.pixelsPerMmZ) or 0, + ) return (None, None) def get_pixels_per_mm(self): diff --git a/mxcubecore/HardwareObjects/QueueModel.py b/mxcubecore/HardwareObjects/QueueModel.py index ccba096c45..1c6d904cec 100644 --- a/mxcubecore/HardwareObjects/QueueModel.py +++ b/mxcubecore/HardwareObjects/QueueModel.py @@ -28,7 +28,6 @@ import json import logging -import os import jsonpickle @@ -426,7 +425,8 @@ def save_queue(self, filename=None): of dictionaries. Information about samples and baskets is not saved """ if not filename: - filename = os.path.join(self.user_file_directory, "queue_active.dat") + # filename = os.path.join(self.user_file_directory, "queue_active.dat") + filename = "queue_active.dat" items_to_save = [] diff --git a/mxcubecore/HardwareObjects/RobodiffLight.py b/mxcubecore/HardwareObjects/RobodiffLight.py index e97570529d..bb08f8f0fa 100644 --- a/mxcubecore/HardwareObjects/RobodiffLight.py +++ b/mxcubecore/HardwareObjects/RobodiffLight.py @@ -18,7 +18,7 @@ def init(self): controller = self.get_object_by_role("controller") self._state = None - self.username = self.name() + self.username = self.name self.wago_controller = getattr(controller, self.wago) self.command_key = self.get_property("cmd") self.in_key = self.get_property("is_in") diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1BeamInfo.py b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1BeamInfo.py index 369b3d4303..978ce14f85 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1BeamInfo.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1BeamInfo.py @@ -47,7 +47,7 @@ def init(self): self.beamx_chan = self.get_channel_object("beamsizex") except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamsize x channel ", self.name() + "%s: cannot connect to beamsize x channel ", self.id ) try: @@ -55,7 +55,7 @@ def init(self): self.beamy_chan.connect_signal("update", self.beamsize_x_changed) except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamsize y channel ", self.name() + "%s: cannot connect to beamsize y channel ", self.id ) self.zoomMotor = self.get_deviceby_role("zoom") diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1DetectorDistance.py b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1DetectorDistance.py index 8650a78be4..fe6e34fdce 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1DetectorDistance.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1DetectorDistance.py @@ -129,7 +129,7 @@ def _set_value(self, value): self.position_chan.set_value(value) def get_motor_mnemonic(self): - return self.name() + return self.name def check_light(self, position): # ligth is not controlled anymore. 
it is left in place but the diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Energy.py b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Energy.py index 1f359f0b1e..a90d7c9604 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Energy.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Energy.py @@ -186,7 +186,7 @@ def set_value(self, value, wait=False): except Exception: logging.getLogger("HWR").error( "%s: Cannot move undulator U20 : State device = %s", - self.name(), + self.id, str(self.und_device.State()), ) @@ -196,21 +196,21 @@ def set_value(self, value, wait=False): except Exception: logging.getLogger("HWR").error( "%s: Cannot move Energy : State device = %s", - self.name(), + self.id, self.get_state(), ) else: logging.getLogger("HWR").error( "%s: Cannot move Energy : State device = %s", - self.name(), + self.id, self.get_state(), ) def set_wavelength(self, value, wait=False): egy_value = self.lambda_to_energy(float(value)) logging.getLogger("HWR").debug( - "%s: Moving wavelength to : %s (egy to %s" % (self.name(), value, egy_value) + "%s: Moving wavelength to : %s (egy to %s" % (self.id, value, egy_value) ) self.set_valuey(egy_value) return value diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1MiniDiff.py b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1MiniDiff.py index e8a405b5b2..ecf5159bcd 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1MiniDiff.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1MiniDiff.py @@ -154,7 +154,7 @@ def centring_done(self, centring_procedure): for motor in motor_pos: position = motor_pos[motor] logging.getLogger("HWR").debug( - " - motor is %s - going to %s" % (motor.name(), position) + " - motor is %s - going to %s" % (motor.id, position) ) self.emit_progress_message("Moving sample to centred position...") @@ -231,7 +231,7 @@ def move_motors(self, motor_positions, timeout=15): motor_positions_copy[motor] = position logging.getLogger("HWR").debug( - " / moving motor. %s to %s" % (motor.name(), position) + " / moving motor. 
%s to %s" % (motor.id, position) ) self.wait_device_ready(timeout) try: diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Resolution.py b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Resolution.py index db12fbede5..d1fd40af45 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Resolution.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX1/PX1Resolution.py @@ -150,7 +150,7 @@ def stop(self): self.stop_command() except Exception: logging.getLogger("HWR").err( - "%s: PX1Resolution.stop: error while trying to stop!", self.name() + "%s: PX1Resolution.stop: error while trying to stop!", self.id ) def re_emit_values(self): diff --git a/mxcubecore/HardwareObjects/SOLEIL/PX2/PX2BeamInfo.py b/mxcubecore/HardwareObjects/SOLEIL/PX2/PX2BeamInfo.py index afae0df357..325f4440c5 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/PX2/PX2BeamInfo.py +++ b/mxcubecore/HardwareObjects/SOLEIL/PX2/PX2BeamInfo.py @@ -67,7 +67,7 @@ def init(self): self.chanBeamSizeX.connect_signal("update", self.beamSizeXChanged) except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamsize x channel ", self.name() + "%s: cannot connect to beamsize x channel ", self.id ) try: @@ -75,7 +75,7 @@ def init(self): self.chanBeamSizeY.connect_signal("update", self.beamSizeYChanged) except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamsize y channel ", self.name() + "%s: cannot connect to beamsize y channel ", self.id ) try: @@ -83,7 +83,7 @@ def init(self): self.chanBeamPosX.connect_signal("update", self.beamPosXChanged) except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamposition x channel ", self.name() + "%s: cannot connect to beamposition x channel ", self.id ) try: @@ -91,7 +91,7 @@ def init(self): self.chanBeamPosY.connect_signal("update", self.beamPosYChanged) except KeyError: logging.getLogger().warning( - "%s: cannot connect to beamposition z channel ", self.name() + "%s: cannot connect to beamposition z channel ", self.id ) self.zoomMotor = self.get_deviceby_role("zoom") diff --git a/mxcubecore/HardwareObjects/SOLEIL/SOLEILPss.py b/mxcubecore/HardwareObjects/SOLEIL/SOLEILPss.py index 998fbad8c3..f9bfd03f51 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/SOLEILPss.py +++ b/mxcubecore/HardwareObjects/SOLEIL/SOLEILPss.py @@ -49,8 +49,6 @@ def getWagoState(self): return self.get_state(self.stateChan.get_value()) def value_changed(self, value): - logging.getLogger("HWR").info( - "%s: SOLEILPss.valueChanged, %s", self.name(), value - ) + logging.getLogger("HWR").info("%s: SOLEILPss.valueChanged, %s", self.id, value) state = self.get_state(value) self.emit("wagoStateChanged", (state,)) diff --git a/mxcubecore/HardwareObjects/SOLEIL/TangoDCMotor.py b/mxcubecore/HardwareObjects/SOLEIL/TangoDCMotor.py index 079059b558..55419e7bd0 100644 --- a/mxcubecore/HardwareObjects/SOLEIL/TangoDCMotor.py +++ b/mxcubecore/HardwareObjects/SOLEIL/TangoDCMotor.py @@ -90,7 +90,7 @@ def positionChanged(self, value): self.old_value = value except Exception: logging.getLogger("HWR").error( - "%s: TangoDCMotor not responding, %s", self.name(), "" + "%s: TangoDCMotor not responding, %s", self.id, "" ) self.old_value = value @@ -191,7 +191,7 @@ def convertValue(self, value): return retvalue def get_motor_mnemonic(self): - return self.name() + return self.name def _set_value(self, value): """Move the motor to the required position diff --git a/mxcubecore/HardwareObjects/SardanaMotor.py b/mxcubecore/HardwareObjects/SardanaMotor.py index efbb5fbcb3..3e59eaac13 100644 --- 
a/mxcubecore/HardwareObjects/SardanaMotor.py +++ b/mxcubecore/HardwareObjects/SardanaMotor.py @@ -76,7 +76,7 @@ def init(self): logging.getLogger("HWR").info( "Undefined property actuator_name in xml. Applying name during instance creation." ) - self.actuator_name = self.name() + self.actuator_name = self.name self.threshold = self.get_property("threshold", self.threshold_default) logging.getLogger("HWR").debug( diff --git a/mxcubecore/HardwareObjects/TangoMotor.py b/mxcubecore/HardwareObjects/TangoMotor.py index 73ea920bfc..6a30626a6c 100644 --- a/mxcubecore/HardwareObjects/TangoMotor.py +++ b/mxcubecore/HardwareObjects/TangoMotor.py @@ -201,7 +201,7 @@ def _set_value(self, value): :param value: float :return: """ - self.log.debug("TangoMotor.py - Moving motor %s to %s" % (self.name(), value)) + self.log.debug("TangoMotor.py - Moving motor %s to %s" % (self.id, value)) if self.is_simulation: self.simulated_pos = value else: @@ -222,14 +222,14 @@ def start_moving(self): def _update_state(self): gevent.sleep(0.5) motor_state = self.chan_state.get_value() - self.log.debug(" reading motor state for %s is %s" % (self.name(), str(motor_state))) + self.log.debug(" reading motor state for %s is %s" % (self.id, str(motor_state))) self.motor_state_changed(motor_state) - + def update_value(self, value=None): """Updates motor position""" if value is None: value = self.get_value() - self.latest_value = value + self.latest_value = value super(TangoMotor, self).update_value(value) def get_motor_mnemonic(self): diff --git a/mxcubecore/HardwareObjects/XMLRPCServer.py b/mxcubecore/HardwareObjects/XMLRPCServer.py index 6f3ef7d295..ef18e26994 100644 --- a/mxcubecore/HardwareObjects/XMLRPCServer.py +++ b/mxcubecore/HardwareObjects/XMLRPCServer.py @@ -172,16 +172,11 @@ def open(self): self._server.register_function(self.setCharacterisationResult) # Register functions from modules specified in element - if self.has_object("apis"): - apis = next(self.get_objects("apis")) - for api in apis.get_objects("api"): - recurse = api.get_property("recurse") - if recurse is None: - recurse = True - - self._register_module_functions( - api.get_property("module"), recurse=recurse - ) + apis = next(self.get_property("apis"), ()) + for api in apis.get("api"): + recurse = api.get("recurse", True) + + self._register_module_functions(api.get("module"), recurse=recurse) self.xmlrpc_server_task = gevent.spawn(self._server.serve_forever) self.beamcmds_hwobj = self.get_object_by_role("beamcmds") diff --git a/mxcubecore/HardwareObjects/abstract/AbstractVideoDevice.py b/mxcubecore/HardwareObjects/abstract/AbstractVideoDevice.py index df32f1264a..7567a59a50 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractVideoDevice.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractVideoDevice.py @@ -112,7 +112,7 @@ def init(self): except TypeError: logging.getLogger().warning( "%s: failed to interpret scale factor for camera.\nUsing default.", - self.name(), + self.id, ) self.cam_scale_factor = self.default_scale_factor @@ -365,7 +365,7 @@ def change_owner(self): os.setuid(int(os.getenv("SUDO_UID"))) except Exception: logging.getLogger().warning( - "%s: failed to change the process ownership.", self.name() + "%s: failed to change the process ownership.", self.id ) def get_width(self): diff --git a/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py b/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py index 4a2dab91b3..d25f151fef 100644 --- a/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py +++ 
b/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py
@@ -141,9 +141,9 @@ def _init_sc_contents(self):
         :rtype: None
         """
         named_samples = {}
-        if self.has_object("test_sample_names"):
-            for tag, val in self["test_sample_names"].get_properties().items():
-                named_samples[val] = tag
+        dd1 = self.get_property("test_sample_names")
+        if dd1:
+            named_samples.update(dd1)

         for basket_index in range(self.no_of_baskets):
             basket = self.get_components()[basket_index]
diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py
index e89a9ba088..4892eeb7b5 100644
--- a/mxcubecore/HardwareRepository.py
+++ b/mxcubecore/HardwareRepository.py
@@ -153,7 +153,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None):
     if not msg0:
         try:
             # instantiate object
-            result = cls(name=role, **initialise_class)
+            result = cls(name=role, hwobj_container=_container, **initialise_class)
         except Exception:
             if _container:
                 msg0 = "Error instantiating %s" % cls.__name__
@@ -183,7 +183,11 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None):

     if not msg0:
         # Recursively load contained objects (of any type that the system can support)
         _objects = configuration.pop("_objects", {})
+        if _objects:
+            # Set configuration with non-object properties.
+            result._config = result.HOConfig(**_objects)
+
         load_time = 1000 * (time.time() - start_time)
         msg1 = "Start loading contents:"
         _table.append(
@@ -206,10 +210,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None):
                    class_name1 = "None"
                else:
                    class_name1 = hwobj.__class__.__name__
-                    if hasattr(result, role1):
-                        result.replace_object(role1, hwobj)
-                    else:
-                        msg1 = "No such role: %s.%s" % (class_name, role1)
+                    _attach_xml_objects(result, hwobj, role)
            except Exception as ex:
                msg1 = "Loading error (%s)" % str(ex)
                class_name = ""
@@ -218,25 +219,9 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None):
                (role1, class_name1, config_file, "%.1d" % load_time, msg1)
            )

-    # Set simple, miscellaneous properties.
-    # NB the attribute must have been initialied in the class __init__ first.
- # If you need data for further processing during init - # that should not remain as attributes - # load them into a pre-defined attribute called '_tmp' - for key, val in configuration.items(): - if hasattr(result, key): - setattr(result, key, val) - else: - logging.getLogger("HWR").error( - "%s has no attribute '%s'", class_name, key - ) - if not msg0: if _container: - if hasattr(_container, role): - _container.replace_object(role, result) - else: - msg0 = "No such role: %s.%s" % (_container.__class__.__name__, role) + setattr(_container._config, role, result) try: # Initialise object result.init() @@ -256,6 +241,36 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): return result +def _attach_xml_objects(container, hwobj, role): + """Recursively attach XML-configured object to container as role + + NBNB guard against duplicate objects""" + hwobj._config = hwobj.HOCOnfig(**hwobj._get_properties()) + hwobj._hwobj_container = container + hwobj._name = role + setattr(container._config, role, hwobj) + objects_by_role = hwobj._objects_by_role + for role2, hwobj2 in objects_by_role.items(): + _attach_xml_objects(hwobj, hwobj2, role2) + for tag in hwobj._objects_names: + if tag not in objects_by_role: + # Complex object, not contained hwobj + objs = list(hwobj._get_objects(tag)) + setattr(hwobj.config, tag, list(_convert_xml_property(obj) for obj in objs)) + + +def _convert_xml_property(hwobj): + """Convert compelx xmkl-configured object""" + result = {} + result.update(hwobj._get_properties()) + for tag in hwobj._objects_names: + # NB this does NOT allow having HardwareObjects inside complex properties + objs = list(hwobj._get_objects(tag)) + result[tag] = list(_convert_xml_property(obj) for obj in objs) + # + return result + + def add_hardware_objects_dirs(ho_dirs): """Adds directories with xml/yaml config files @@ -270,13 +285,15 @@ def add_hardware_objects_dirs(ho_dirs): sys.path.insert(0, new_ho_dir) -def set_user_file_directory(user_file_directory): - """Sets user file directory. - - Args: - user_file_directory (str): absolute path to user file directory - """ - BaseHardwareObjects.HardwareObjectNode.set_user_file_directory(user_file_directory) +# +# +# def set_user_file_directory(user_file_directory): +# """Sets user file directory. 
+# +# Args: +# user_file_directory (str): absolute path to user file directory +# """ +# BaseHardwareObjects.HardwareObjectNode.set_user_file_directory(user_file_directory) def init_hardware_repository(configuration_path): @@ -315,7 +332,6 @@ def init_hardware_repository(configuration_path): _instance = __HardwareRepositoryClient(configuration_path) _instance.connect() beamline = load_from_yaml(BEAMLINE_CONFIG_FILE, role="beamline") - beamline._hwr_init_done() def uninit_hardware_repository(): @@ -490,10 +506,10 @@ def hardwareObjectDeleted(name=hwobj_instance.name()): hwobj_instance = None comment = "Failed to init class" else: - if hwobj_instance.name() in self.invalid_hardware_objects: + if hwobj_instance.load_name in self.invalid_hardware_objects: self.invalid_hardware_objects.remove(hwobj_instance.name()) - self.hardware_objects[hwobj_instance.name()] = hwobj_instance + self.hardware_objects[hwobj_instance.load_name] = hwobj_instance else: logging.getLogger("HWR").error( "Failed to load Hardware object %s", hwobj_name @@ -855,7 +871,7 @@ def get_info(self, name): for ho in hardware_obj.get_devices(): try: - d["children"][ho.name()] = self.get_info(ho.name()) + d["children"][ho.load_name] = self.get_info(ho.load_name) except Exception: continue diff --git a/mxcubecore/queue_entry/characterisation.py b/mxcubecore/queue_entry/characterisation.py index eaaa662086..cfd0efcba2 100644 --- a/mxcubecore/queue_entry/characterisation.py +++ b/mxcubecore/queue_entry/characterisation.py @@ -105,19 +105,17 @@ def __getstate__(self): d = BaseQueueEntry.__getstate__(self) d["data_analysis_hwobj"] = ( - HWR.beamline.characterisation.name() + HWR.beamline.characterisation.name if HWR.beamline.characterisation else None ) d["diffractometer_hwobj"] = ( - HWR.beamline.diffractometer.name() if HWR.beamline.diffractometer else None + HWR.beamline.diffractometer.name if HWR.beamline.diffractometer else None ) d["queue_model_hwobj"] = ( - HWR.beamline.queue_model.name() if HWR.beamline.queue_model else None - ) - d["session_hwobj"] = ( - HWR.beamline.session.name() if HWR.beamline.session else None + HWR.beamline.queue_model.name if HWR.beamline.queue_model else None ) + d["session_hwobj"] = HWR.beamline.session.name if HWR.beamline.session else None return d diff --git a/mxcubecore/queue_entry/data_collection.py b/mxcubecore/queue_entry/data_collection.py index 29183fd423..3646699ebc 100644 --- a/mxcubecore/queue_entry/data_collection.py +++ b/mxcubecore/queue_entry/data_collection.py @@ -64,8 +64,8 @@ def __getstate__(self): d["shape_history"] = ( HWR.beamline.sample_view.name() if HWR.beamline.sample_view else None ) - d["session"] = HWR.beamline.session.name() if HWR.beamline.session else None - d["lims_client_hwobj"] = HWR.beamline.lims.name() if HWR.beamline.lims else None + d["session"] = HWR.beamline.session.name if HWR.beamline.session else None + d["lims_client_hwobj"] = HWR.beamline.lims.name if HWR.beamline.lims else None return d def execute(self): diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index 87e0dc8c6d..3a867322b8 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -383,52 +383,6 @@ def test_set_user_file_directory( assert HardwareObjectNode.user_file_directory == new_path assert hw_obj_node.user_file_directory == new_path - @pytest.mark.parametrize("name", ("test_node_two",)) - def test_set_name(self, hw_obj_node: HardwareObjectNode, name: str): - """Test "set_name" method. 
- - Args: - hw_obj_node (HardwareObjectNode): Object instance. - name (str): Name. - """ - - # Basic check to make sure the initial return value is a string - assert isinstance(hw_obj_node.name(), str) - - # Call method - hw_obj_node.set_name(name=name) - - # Confirm node name has been updated correctly - assert hw_obj_node.name() == name - - @pytest.mark.parametrize("roles", (("slits", "queue", "session"),)) - def test_get_roles( - self, - mocker: "MockerFixture", - hw_obj_node: HardwareObjectNode, - roles: Tuple[str], - ): - """Test "get_roles" method. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - hw_obj_node (HardwareObjectNode): Object instance. - roles (Tuple[str]): Roles. - """ - - # We are only worrying about the dictionary keys, - # as the item values are not being read by the "get_roles" method. - role_values = dict([(key, None) for key in roles]) - - # Patch "_objects_by_role" attribute to test with known values - mocker.patch.dict(hw_obj_node._objects_by_role, values=role_values, clear=True) - - # Call method - res = hw_obj_node.get_roles() - - # Check output list of roles matched test input - assert tuple(res) == roles - @pytest.mark.parametrize( ("initial_path", "new_path"), (("/mnt/data/old_path", "/mnt/data/new_path"),), @@ -463,35 +417,6 @@ def test_set_path( # Validate path updated assert hw_obj_node._path == new_path - @pytest.mark.parametrize("path", ("/mnt/data/file.xml",)) - def test_get_xml_path( - self, - mocker: "MockerFixture", - hw_obj_node: HardwareObjectNode, - path: str, - ): - """Test "get_xml_path" method. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - hw_obj_node (HardwareObjectNode): Object instance. - path (str): XML path. - """ - - # Patch "_xml_path" attribute to set known values - mocker.patch.object( - hw_obj_node, - "_xml_path", - new=path, - create=True, - ) - - # Call method - res = hw_obj_node.get_xml_path() - - # Validate correct path returned - assert res == path - @pytest.mark.parametrize( ("objects", "count"), ( @@ -871,7 +796,7 @@ def test_add_object( initial_obj_names: List[str], initial_objects: List[List[Union[HardwareObject, None]]], ): - """Test "add_object" method. + """Test "_add_object" method. Args: mocker (MockerFixture): Instance of the Pytest mocker fixture. @@ -911,7 +836,7 @@ def test_add_object( _initial_value = None # Call method - hw_obj_node.add_object(name=name, hw_object=hw_object, role=role) + hw_obj_node._add_object(name=name, hw_object=hw_object, role=role) _objects_names: List[str] = getattr( hw_obj_node, @@ -1011,7 +936,7 @@ def test_get_objects( initial_obj_names: List[str], initial_objects: List[List[Union[HardwareObject, None]]], ): - """Test "get_objects" method. + """Test "_get_objects" method. Args: mocker (MockerFixture): Instance of the Pytest mocker fixture. @@ -1043,7 +968,7 @@ def test_get_objects( ) # Call method - res = list(hw_obj_node.get_objects(object_name=name)) + res = list(hw_obj_node._get_objects(object_name=name)) if name in _objects_names: # Check output list matches expectations @@ -1192,7 +1117,7 @@ def test_set_property( value: Any, output_value: Union[str, int, float, bool], ): - """Test "set_property" method. + """Test "_set_property" method. Args: mocker (MockerFixture): Instance of the Pytest mocker fixture. 
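For orientation, a minimal sketch of the call pattern these renamed helpers give after the transition patches in this series; the node, role and property names below are invented for illustration and are not taken from the repository.

from mxcubecore.BaseHardwareObjects import HardwareObjectNode

# Stand-alone node, purely for illustration.
node = HardwareObjectNode("demo_node")

# Writes now go through the renamed private helper; string values are
# normalised (to int/float/bool where possible), as the parametrised
# test above exercises through its value/output_value pairs.
node._set_property("threshold", "0.5")

# Public read access keeps the get_property()/get_properties() names.
print(node.get_property("threshold"))            # expected: 0.5
print(node.get_property("missing", "fallback"))  # expected: "fallback"

# Contained objects are likewise reached through the private accessor;
# nothing was added here, so the list is expected to be empty.
print(list(node._get_objects("demo_role")))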
@@ -1207,7 +1132,7 @@ def test_set_property( set_property_path_patch = mocker.patch.object(PropertySet, "set_property_path") # Call method, always returns None - hw_obj_node.set_property(name=name, value=value) + hw_obj_node._set_property(name=name, value=value) # Check "PropertySet.__setitem__" patch was called with expected value setitem_patch.assert_called_once_with(*(str(name), output_value)) From 077274b7d145d4abe36ba28cacef3f237aa2fa83 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Tue, 21 May 2024 10:02:12 +0100 Subject: [PATCH 02/53] Second step in moving to compatible XML-YML transition status --- mxcubecore/BaseHardwareObjects.py | 132 ++++++++++++---------- mxcubecore/HardwareObjects/Beamline.py | 8 +- mxcubecore/HardwareRepository.py | 38 ++++--- test/pytest/test_base_hardware_objects.py | 102 ----------------- 4 files changed, 99 insertions(+), 181 deletions(-) diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index 11d7f0b75b..4fad6526ee 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -87,8 +87,19 @@ class DefaultSpecificState(enum.Enum): class ConfiguredObject: """Superclass for classes that take configuration from YAML files""" - class HOConfig(pydantic.BaseModel): - model_config = pydantic.ConfigDict(extra="allow") + # class HOConfig(pydantic.BaseModel): + # model_config = pydantic.ConfigDict(extra="allow") + + class HOConfig: + """Temporary replacement for Pydantic class + + Required during transition, as long as we don't have the fields defined""" + + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def model_dump(self): + return self.__dict__.copy() def __init__( self, name: str, hwobj_container: Optional["ConfiguredObject"] = None @@ -102,8 +113,8 @@ def __init__( self._config: Optional["ConfiguredObject.HOConfig"] = None self._hwobj_container: Optional[ConfiguredObject] = hwobj_container - def __getattr__(self, name): - return getattr(self._config, name) + def __getattr__(self, attr): + return getattr(self.__dict__["_config"], attr) @property def name(self): @@ -143,16 +154,12 @@ def get_by_id(self, _id: str) -> "ConfiguredObject": # return result - def _init(self) -> None: - """Object initialisation - executed *before* loading contents""" - pass - def init(self) -> None: """Object initialisation - executed *after* loading contents""" pass @property - def objects_by_role(self) -> dict[str, Union[Self, None]]: + def objects_by_role(self) -> Dict[str, Union[Self, None]]: """All contained Objects mapped by role (in specification order). Includes objects defined in subclasses. @@ -160,19 +167,28 @@ def objects_by_role(self) -> dict[str, Union[Self, None]]: Returns: OrderedDict[str, Union[Self, None]]: Contained objects mapped by role. 
""" - return dict( - tpl - for tpl in self._config.model_dump().items() - if isinstance(tpl[1], ConfiguredObject) - ) + if self._config is not None: + return dict( + tpl + for tpl in self._config.model_dump().items() + if isinstance(tpl[1], ConfiguredObject) + ) + elif isinstance(self, HardwareObjectNode): + # NBNB TEMPORARY for transition to yaml configuration only + return self._objects_by_role.copy() - def get_properties(self) -> dict[str, Any]: + def get_properties(self) -> Dict[str, Any]: """Get configured properties (not roles)""" - return dict( - tpl - for tpl in self._config.model_dump().items() - if not isinstance(tpl[1], ConfiguredObject) and tpl[1] is not None - ) + if self._config is not None: + return dict( + tpl + for tpl in self._config.model_dump().items() + if not isinstance(tpl[1], ConfiguredObject) and tpl[1] is not None + ) + + elif isinstance(self, HardwareObjectNode): + # NBNB TEMPORARY for transition to yaml configuration only + return HardwareObjectNode.get_properties(self) def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: """Get property value or contained HardwareObject. @@ -184,11 +200,15 @@ def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: Returns: Any: Property value. """ - return ( - getattr(self._config, name) - if hasattr(self._config, name) - else default_value - ) + if self._config is not None: + return ( + getattr(self._config, name) + if hasattr(self._config, name) + else default_value + ) + elif isinstance(self, HardwareObjectNode): + # NBNB TEMPORARY for transition to yaml configuration only + return HardwareObjectNode.get_property(self, name, default_value) def get_roles(self) -> List[str]: """Get hardware object roles. @@ -200,11 +220,7 @@ def get_roles(self) -> List[str]: "%s.get_roles is deprecated. Avoid, or use objects_by_role instead" % self.__class__.__name__ ) - return list( - tpl[0] - for tpl in self._config.model_dump().items() - if isinstance(tpl[1], ConfiguredObject) - ) + return list(self.objects_by_role.keys()) def print_log( self, @@ -273,8 +289,7 @@ def get_changes(self) -> Generator[tuple, None, None]: class HardwareObjectNode: """Hardware Object Node""" - # - # user_file_directory: str + user_file_directory: str def __init__(self, node_name: str) -> None: """ @@ -291,15 +306,14 @@ def __init__(self, node_name: str) -> None: self.__references: List[Tuple[str, str, str, int, int, int]] = [] self._xml_path: Union[str, None] = None - # - # @staticmethod - # def set_user_file_directory(user_file_directory: str) -> None: - # """Set user file directory. - # - # Args: - # user_file_directory (str): User file directory path. - # """ - # HardwareObjectNode.user_file_directory = user_file_directory + @staticmethod + def set_user_file_directory(user_file_directory: str) -> None: + """Set user file directory. + + Args: + user_file_directory (str): User file directory path. + """ + HardwareObjectNode.user_file_directory = user_file_directory @property def load_name(self) -> str: @@ -334,7 +348,8 @@ def __len__(self) -> int: def __setattr__(self, attr: str, value: Any) -> None: try: - if attr not in self.__dict__ and attr in self._config.model_dump(): + config = self.__dict__.get("_config") + if attr not in self.__dict__ and config and attr in config.model_dump(): warnings.warn( "%s.__setattr__ is Deprecated. 
Avoid" % self.__class__.__name__ ) @@ -576,7 +591,19 @@ def _set_property(self, name: str, value: Any) -> None: self._property_set[name] = value self._property_set.set_property_path(name, self._path + "/" + str(name)) - def _get_properties(self) -> PropertySet: + def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: + """Get property value. + + Args: + name (str): Name + default_value (Optional[Any], optional): Default value. Defaults to None. + + Returns: + Any: Property value. + """ + return self._property_set.get(str(name), default_value) + + def get_properties(self) -> PropertySet: """Get properties - for XML-config implementation loading only Returns: @@ -1027,6 +1054,7 @@ def __init__(self, rootName: str) -> None: Args: rootName (str): Name. """ + ConfiguredObject.__init__(self, rootName) HardwareObjectNode.__init__(self, rootName) HardwareObjectMixin.__init__(self) self.log: "Logger" = logging.getLogger("HWR").getChild(self.__class__.__name__) @@ -1075,7 +1103,7 @@ def __getattr__(self, attr: str) -> Union["CommandObject", Any]: return CommandContainer.__getattr__(self, attr) except AttributeError: try: - return HardwareObjectNode.__getattr__(self, attr) + return super().__getattr__(attr) except AttributeError: raise AttributeError(attr) @@ -1125,21 +1153,7 @@ def xml_source(self) -> Union[str, Any]: return get_hardware_repository().xml_source[self.load_name] -class HardwareObjectYaml(ConfiguredObject, HardwareObjectMixin): - """Yaml-configured hardware object. - - For use when we move configuration out of xml and into yaml. - - The class is needed only to provide a single superclass - that combines ConfiguredObject and HardwareObjectMixin""" - - def __init__(self, name: str) -> None: - """ - Args: - name (str): Name. 
- """ - ConfiguredObject.__init__(self, name) - HardwareObjectMixin.__init__(self) +HardwareObjectYaml = HardwareObject class Procedure(HardwareObject): diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index c654449a0c..98d384bc46 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -147,6 +147,10 @@ def __init__(self, name): # 2D-points, (none centred positions) self.enable_2d_points = True + def _init(self) -> None: + """Object initialisation - executed *before* loading contents""" + pass + def init(self): """Object initialisation - executed *after* loading contents""" # Validate acquisition parameters @@ -835,7 +839,7 @@ def get_default_characterisation_parameters(self): return self.characterisation.get_default_characterisation_parameters() def force_emit_signals(self): - hwobjs = list(self.objects_by_role().values()) + hwobjs = list(self.objects_by_role.values()) for hwobj in hwobjs: hwobj.force_emit_signals() - hwobjs.extend(hwobj.objects_by_role().values()) + hwobjs.extend(hwobj.objects_by_role.values()) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 4892eeb7b5..6cfdb55cc4 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -153,7 +153,8 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if not msg0: try: # instantiate object - result = cls(name=role, hwobj_container=_container, **initialise_class) + result = cls(name=role, **initialise_class) + result._hwobj_container = _container except Exception: if _container: msg0 = "Error instantiating %s" % cls.__name__ @@ -186,7 +187,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if _objects: # Set configuration with non-object properties. 
- result._config = result.HOCOnfig(**_objects) + result._config = result.HOConfig(**_objects) load_time = 1000 * (time.time() - start_time) msg1 = "Start loading contents:" @@ -210,7 +211,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): class_name1 = "None" else: class_name1 = hwobj.__class__.__name__ - _attach_xml_objects(result, hwobj, role) + _attach_xml_objects(result, hwobj, role1) except Exception as ex: msg1 = "Loading error (%s)" % str(ex) class_name = "" @@ -245,14 +246,15 @@ def _attach_xml_objects(container, hwobj, role): """Recursively attach XML-configured object to container as role NBNB guard against duplicate objects""" - hwobj._config = hwobj.HOCOnfig(**hwobj._get_properties()) + hwobj._hwobj_container = container hwobj._name = role + hwobj._config = hwobj.HOConfig(**hwobj.get_properties()) setattr(container._config, role, hwobj) objects_by_role = hwobj._objects_by_role for role2, hwobj2 in objects_by_role.items(): _attach_xml_objects(hwobj, hwobj2, role2) - for tag in hwobj._objects_names: + for tag in hwobj._objects_names(): if tag not in objects_by_role: # Complex object, not contained hwobj objs = list(hwobj._get_objects(tag)) @@ -260,10 +262,10 @@ def _attach_xml_objects(container, hwobj, role): def _convert_xml_property(hwobj): - """Convert compelx xmkl-configured object""" + """Convert complex xml-configured object""" result = {} - result.update(hwobj._get_properties()) - for tag in hwobj._objects_names: + result.update(hwobj.get_properties()) + for tag in hwobj._objects_names(): # NB this does NOT allow having HardwareObjects inside complex properties objs = list(hwobj._get_objects(tag)) result[tag] = list(_convert_xml_property(obj) for obj in objs) @@ -287,13 +289,13 @@ def add_hardware_objects_dirs(ho_dirs): # # -# def set_user_file_directory(user_file_directory): -# """Sets user file directory. -# -# Args: -# user_file_directory (str): absolute path to user file directory -# """ -# BaseHardwareObjects.HardwareObjectNode.set_user_file_directory(user_file_directory) +def set_user_file_directory(user_file_directory): + """Sets user file directory. 
+ + Args: + user_file_directory (str): absolute path to user file directory + """ + BaseHardwareObjects.HardwareObjectNode.set_user_file_directory(user_file_directory) def init_hardware_repository(configuration_path): @@ -476,7 +478,7 @@ def _load_hardware_object(self, hwobj_name=""): dispatcher.send("hardwareObjectLoaded", hwobj_name, self) - def hardwareObjectDeleted(name=hwobj_instance.name()): + def hardwareObjectDeleted(name=hwobj_instance.name): logging.getLogger("HWR").debug( "%s Hardware Object has been deleted from Hardware Repository", name, @@ -502,12 +504,12 @@ def hardwareObjectDeleted(name=hwobj_instance.name()): logging.getLogger("HWR").exception( 'Cannot initialize Hardware Object "%s"', hwobj_name ) - self.invalid_hardware_objects.add(hwobj_instance.name()) + self.invalid_hardware_objects.add(hwobj_instance.name) hwobj_instance = None comment = "Failed to init class" else: if hwobj_instance.load_name in self.invalid_hardware_objects: - self.invalid_hardware_objects.remove(hwobj_instance.name()) + self.invalid_hardware_objects.remove(hwobj_instance.name) self.hardware_objects[hwobj_instance.load_name] = hwobj_instance else: diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index 3a867322b8..80654e0f10 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -1,5 +1,4 @@ import copy -from collections import OrderedDict from logging import Logger from typing import ( TYPE_CHECKING, @@ -8,9 +7,6 @@ Generator, Iterator, List, -) -from typing import OrderedDict as TOrderedDict -from typing import ( Tuple, Union, ) @@ -118,104 +114,6 @@ def test_configured_object_setup(self, configured_object: ConfiguredObject): ConfiguredObject, ) - def test_init(self, configured_object: ConfiguredObject): - """Test "_init" and "init" methods. - - Args: - configured_object (ConfiguredObject): Object instance. - """ - - # Call "_init" placeholder method - res = configured_object._init() - assert res is None - - # Call "init" placeholder method - res = configured_object.init() - assert res is None - - # def test_replace_object(self): ... - - @pytest.mark.parametrize( - "content_roles", - ( - [], - ["test1"], - ["test1", "test2"], - ["test1", "test2", "test3"], - ["test2", "test3"], - ), - ) - def test_all_roles( - self, - mocker: "MockerFixture", - configured_object: ConfiguredObject, - content_roles: List[str], - ): - """Test "all_roles" property. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - configured_object (ConfiguredObject): Object instance. - content_roles (List[str]): Initial content roles. - """ - - # Patch "__content_roles" with known values - mocker.patch.object( - configured_object, - "_ConfiguredObject__content_roles", - new=copy.deepcopy(content_roles), - ) - - # Check returned result matches patched values - assert configured_object.all_roles == tuple(content_roles) - - @pytest.mark.parametrize( - "initial_objects", - ( - OrderedDict(test1=None), - OrderedDict(test1=None, test2=None, test3=None), - OrderedDict(test1=None, test3=None, test2=None), - OrderedDict(test1=None, test3=None), - ), - ) - def test_all_objects_by_role( - self, - mocker: "MockerFixture", - configured_object: ConfiguredObject, - initial_objects: TOrderedDict[str, None], - ): - """Test "all_objects_by_role" property. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - configured_object (ConfiguredObject): Object instance. 
- initial_objects (TOrderedDict[str, None]): Initial objects. - """ - - # Patch "_objects" with known values - mocker.patch.object( - configured_object, - "_objects", - new=copy.deepcopy(initial_objects), - ) - - # Check returned result matches patched values - assert configured_object.all_objects_by_role == initial_objects - - # def test_procedures( - # self, - # mocker: "MockerFixture", - # configured_object: ConfiguredObject, - # ): - # """ """ - - # # Patch "_procedure_names" with known values - # mocker.patch.object( - # configured_object, - # "_procedure_names", - # new=None, - # ) - class TestPropertySet: """Run tests for "PropertySet" class""" From ddb3f3a29223aa5f23ed501f905030c8f12433f7 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Wed, 22 May 2024 18:16:54 +0100 Subject: [PATCH 03/53] Third step in moving to compatible XML-YML transition status --- mxcubecore/BaseHardwareObjects.py | 33 +++----------- .../HardwareObjects/Gphl/CollectEmulator.py | 6 +-- .../HardwareObjects/Gphl/GphlWorkflow.py | 23 +++++----- .../Gphl/GphlWorkflowConnection.py | 43 +++++++++---------- mxcubecore/HardwareRepository.py | 12 +++--- 5 files changed, 49 insertions(+), 68 deletions(-) diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index 4fad6526ee..183e536534 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -131,6 +131,7 @@ def id(self): obj = self while obj._hwobj_container: names.append(obj.name) + obj = obj._hwobj_container return ".".join(reversed(names)) else: return "" @@ -154,10 +155,6 @@ def get_by_id(self, _id: str) -> "ConfiguredObject": # return result - def init(self) -> None: - """Object initialisation - executed *after* loading contents""" - pass - @property def objects_by_role(self) -> Dict[str, Union[Self, None]]: """All contained Objects mapped by role (in specification order). @@ -661,6 +658,9 @@ def __init__(self) -> None: # List of member names (methods) to be exported (Set at configuration stage) self._exports_config_list = [] + self.log: "Logger" = logging.getLogger("HWR").getChild(self.__class__.__name__) + self.user_log: "Logger" = logging.getLogger("user_log_level") + def __bool__(self) -> Literal[True]: return True @@ -679,6 +679,9 @@ def init(self) -> None: For ConfiguredObjects called after loading contained objects. """ + self._exports_config_list.extend( + ast.literal_eval(self.get_property("exports", "[]").strip()) + ) self._exports = dict.fromkeys(self._exports_config_list, {}) # Add methods that are exported programatically @@ -1057,28 +1060,6 @@ def __init__(self, rootName: str) -> None: ConfiguredObject.__init__(self, rootName) HardwareObjectNode.__init__(self, rootName) HardwareObjectMixin.__init__(self) - self.log: "Logger" = logging.getLogger("HWR").getChild(self.__class__.__name__) - self.user_log: "Logger" = logging.getLogger("user_log_level") - self.__exports: Dict[str, Any] = {} - self.__pydantic_models: Dict[str, Type["BaseModel"]] = {} - self._exported_attributes: Dict[str, Any] = {} - self._exports_config_list = [] - - @property - def exported_attributes(self) -> Dict[str, Any]: - """Get exported attributes. - - Returns: - Dict[str, Any]: Exported attributes. 
- """ - return self._exported_attributes - - def init(self) -> None: - """Hardware object init.""" - self._exports_config_list.extend( - ast.literal_eval(self.get_property("exports", "[]").strip()) - ) - HardwareObjectMixin.init(self) def __getstate__(self) -> str: diff --git a/mxcubecore/HardwareObjects/Gphl/CollectEmulator.py b/mxcubecore/HardwareObjects/Gphl/CollectEmulator.py index 8f7287721a..5770e13ea5 100644 --- a/mxcubecore/HardwareObjects/Gphl/CollectEmulator.py +++ b/mxcubecore/HardwareObjects/Gphl/CollectEmulator.py @@ -279,14 +279,14 @@ def data_collection_hook(self): simcal_executive = gphl_connection.get_executable("simcal") simcal_licence_dir = ( gphl_connection.get_bdg_licence_dir("simcal") - or gphl_connection.software_paths["GPHL_INSTALLATION"] + or gphl_connection.config.software_paths["GPHL_INSTALLATION"] ) # # Get environmental variables. envs = {"autoPROC_home": simcal_licence_dir} - GPHL_XDS_PATH = gphl_connection.software_paths.get("GPHL_XDS_PATH") + GPHL_XDS_PATH = gphl_connection.config.software_paths.get("GPHL_XDS_PATH") if GPHL_XDS_PATH: envs["GPHL_XDS_PATH"] = GPHL_XDS_PATH - GPHL_CCP4_PATH = gphl_connection.software_paths.get("GPHL_CCP4_PATH") + GPHL_CCP4_PATH = gphl_connection.config.software_paths.get("GPHL_CCP4_PATH") if GPHL_CCP4_PATH: envs["GPHL_CCP4_PATH"] = GPHL_CCP4_PATH text_type = conversion.text_type diff --git a/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py b/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py index 636e08ddfa..a742057300 100644 --- a/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py +++ b/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py @@ -184,9 +184,8 @@ def __init__(self, name): self._queue_entry = None # Configuration data - set on load - self.workflows = OrderedDict() - self.settings = {} - self.test_crystals = {} + self.settings = self.get_property("settings", {}) + # auxiliary data structure from configuration. Set in init self.workflow_strategies = OrderedDict() @@ -255,7 +254,7 @@ def init(self): # Set standard configurable file paths file_paths = self.file_paths - ss0 = HWR.beamline.gphl_connection.software_paths["gphl_beamline_config"] + ss0 = HWR.beamline.gphl_connection.config.software_paths["gphl_beamline_config"] file_paths["gphl_beamline_config"] = ss0 file_paths["transcal_file"] = os.path.join(ss0, "transcal.nml") file_paths["diffractcal_file"] = os.path.join(ss0, "diffractcal.nml") @@ -274,7 +273,7 @@ def init(self): beamline_hook = "py4j::" # Consolidate workflow options - for title, workflow in self.workflows.items(): + for title, workflow in self.config.workflows.items(): workflow["wfname"] = title opt0 = workflow.get("options", {}) @@ -309,7 +308,7 @@ def shutdown(self): def get_available_workflows(self): """Get list of workflow description dictionaries.""" - return copy.deepcopy(self.workflows) + return copy.deepcopy(self.config.workflows) def query_pre_strategy_params(self, choose_lattice=None): """Query pre_strategy parameters. 
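Between the hunks above and below, the practical change is that GPhL configuration data is now reached through the object's config namespace. A rough, illustrative sketch of the resulting access pattern follows; show_gphl_configuration is a made-up helper and its arguments stand for the loaded gphl_workflow and gphl_connection objects.

def show_gphl_configuration(workflow, connection):
    # Dictionaries that used to live as plain attributes are read from config ...
    for title, strategy in workflow.config.workflows.items():
        print(title, strategy.get("options", {}))
    print(connection.config.software_paths["GPHL_INSTALLATION"])
    # ... while simple properties are still fetched with get_property() and a default.
    print(workflow.get_property("settings", {}))
    print(connection.get_property("gphl_subdir", "GPHL"))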
@@ -813,7 +812,7 @@ def execute(self): # NB - this is really initialising, but we want to do it aftrer WF start # since here the directory we want is set self.recentring_file = os.path.join( - HWR.beamline.gphl_connection.software_paths["GPHL_WDIR"], "recen.nml" + HWR.beamline.gphl_connection.config.software_paths["GPHL_WDIR"], "recen.nml" ) dispatcher.connect( @@ -1775,10 +1774,12 @@ def calculate_recentring(self, okp, ref_okp, ref_xyz): recen_executable = HWR.beamline.gphl_connection.get_executable("recen") # Get environmental variables envs = {} - GPHL_XDS_PATH = HWR.beamline.gphl_connection.software_paths.get("GPHL_XDS_PATH") + GPHL_XDS_PATH = HWR.beamline.gphl_connection.config.software_paths.get( + "GPHL_XDS_PATH" + ) if GPHL_XDS_PATH: envs["GPHL_XDS_PATH"] = GPHL_XDS_PATH - GPHL_CCP4_PATH = HWR.beamline.gphl_connection.software_paths.get( + GPHL_CCP4_PATH = HWR.beamline.gphl_connection.config.software_paths.get( "GPHL_CCP4_PATH" ) if GPHL_CCP4_PATH: @@ -2647,7 +2648,7 @@ def get_emulation_samples(self): """ crystal_file_name = "crystal.nml" result = [] - sample_dir = HWR.beamline.gphl_connection.software_paths.get( + sample_dir = HWR.beamline.gphl_connection.config.software_paths.get( "gphl_test_samples" ) serial = 0 @@ -2734,7 +2735,7 @@ def get_emulation_sample_dir(self, sample_name=None): if sample_name: if sample_name.startswith(self.TEST_SAMPLE_PREFIX): sample_name = sample_name[len(self.TEST_SAMPLE_PREFIX) + 1 :] - sample_dir = HWR.beamline.gphl_connection.software_paths.get( + sample_dir = HWR.beamline.gphl_connection.config.software_paths.get( "gphl_test_samples" ) if not sample_dir: diff --git a/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py b/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py index f6fdb77a4b..08a156fcb2 100644 --- a/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py +++ b/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py @@ -108,13 +108,10 @@ def __init__(self, name): self.collect_emulator_process = None # Configured parameters - self.directory_locations = {} - self.ssh_options = {} - self.gphl_subdir = "GPHL" - self.gphl_persistname = "persistence" - self.connection_parameters = {} - self.software_paths = {} - self.software_properties = {} + self.ssh_options = self.get_property("ssh_options", {}) + self.gphl_subdir = self.get_property("gphl_subdir", "GPHL") + self.gphl_persistname = self.get_property("gphl_persistname", "persistence") + self.connection_parameters = self.get_property("connection_parameters", {}) self.update_state(self.STATES.UNKNOWN) @@ -126,10 +123,10 @@ def init(self): self.connection_parameters["python_address"] = socket.gethostname() # Adapt paths and properties to use directory_locations - locations = self.directory_locations + locations = self.config.directory_locations installdir = locations["GPHL_INSTALLATION"] - paths = self.software_paths - properties = self.software_properties + paths = self.config.software_paths + properties = self.config.software_properties for tag, val in paths.items(): val2 = val.format(**locations) @@ -167,9 +164,11 @@ def to_java_time(self, time_in): def get_executable(self, name): """Get location of executable binary for program called 'name'""" tag = "co.gphl.wf.%s.bin" % name - result = self.software_paths.get(tag) + result = self.config.software_paths.get(tag) if not result: - result = os.path.join(self.software_paths["GPHL_INSTALLATION"], "exe", name) + result = os.path.join( + self.config.software_paths["GPHL_INSTALLATION"], "exe", name + ) # return result @@ -177,7 
+176,7 @@ def get_bdg_licence_dir(self, name): """Get directory containing specific licence file (if any) for program called 'name'""" tag = "co.gphl.wf.%s.bdg_licence_dir" % name - result = self.software_paths.get(tag) + result = self.config.software_paths.get(tag) # return result @@ -235,7 +234,7 @@ def start_workflow(self, workflow_queue, workflow_model_obj): self.msg_class_imported = False # Cannot be done in init, where the api.sessions link is not yet ready - self.software_paths["GPHL_WDIR"] = os.path.join( + self.config.software_paths["GPHL_WDIR"] = os.path.join( HWR.beamline.session.get_base_process_directory(), self.gphl_subdir ) @@ -256,7 +255,7 @@ def start_workflow(self, workflow_queue, workflow_model_obj): else: command_list = [] runworkflow_opts = [] - command_list.append(self.software_paths["runworkflow"]) + command_list.append(self.config.software_paths["runworkflow"]) # # HACK - debug options REMOVE! # import socket @@ -291,7 +290,7 @@ def start_workflow(self, workflow_queue, workflow_model_obj): path_template = workflow_model_obj.get_path_template() if "prefix" in workflow_options: workflow_options["prefix"] = path_template.base_prefix - workflow_options["wdir"] = self.software_paths["GPHL_WDIR"] + workflow_options["wdir"] = self.config.software_paths["GPHL_WDIR"] workflow_options["persistname"] = self.gphl_persistname # Set the workflow root subdirectory parameter from the base image directory @@ -319,7 +318,7 @@ def start_workflow(self, workflow_queue, workflow_model_obj): command_list.extend( conversion.java_property(keyword, value, quote_value=in_shell) ) - for keyword, value in self.software_properties.items(): + for keyword, value in self.config.software_properties.items(): command_list.extend( conversion.java_property(keyword, value, quote_value=in_shell) ) @@ -356,17 +355,17 @@ def start_workflow(self, workflow_queue, workflow_model_obj): # These env variables are needed in some cases for wrapper scripts # Specifically for the stratcal wrapper. - envs["GPHL_INSTALLATION"] = self.software_paths["GPHL_INSTALLATION"] - GPHL_XDS_PATH = self.software_paths.get("GPHL_XDS_PATH") + envs["GPHL_INSTALLATION"] = self.config.software_paths["GPHL_INSTALLATION"] + GPHL_XDS_PATH = self.config.software_paths.get("GPHL_XDS_PATH") if GPHL_XDS_PATH: envs["GPHL_XDS_PATH"] = GPHL_XDS_PATH - GPHL_CCP4_PATH = self.software_paths.get("GPHL_CCP4_PATH") + GPHL_CCP4_PATH = self.config.software_paths.get("GPHL_CCP4_PATH") if GPHL_CCP4_PATH: envs["GPHL_CCP4_PATH"] = GPHL_CCP4_PATH - GPHL_AUTOPROC_PATH = self.software_paths.get("GPHL_AUTOPROC_PATH") + GPHL_AUTOPROC_PATH = self.config.software_paths.get("GPHL_AUTOPROC_PATH") if GPHL_AUTOPROC_PATH: envs["GPHL_AUTOPROC_PATH"] = GPHL_AUTOPROC_PATH - GPHL_MINICONDA_PATH = self.software_paths.get("GPHL_MINICONDA_PATH") + GPHL_MINICONDA_PATH = self.config.software_paths.get("GPHL_MINICONDA_PATH") if GPHL_MINICONDA_PATH: envs["GPHL_MINICONDA_PATH"] = GPHL_MINICONDA_PATH if runworkflow_opts: diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 6cfdb55cc4..56b48b9394 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -184,18 +184,19 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if not msg0: # Recursively load contained objects (of any type that the system can support) _objects = configuration.pop("_objects", {}) + # Set configuration with non-object properties. 
+ result._config = result.HOConfig(**configuration) if _objects: - # Set configuration with non-object properties. - result._config = result.HOConfig(**_objects) - load_time = 1000 * (time.time() - start_time) msg1 = "Start loading contents:" _table.append( (role, class_name, configuration_file, "%.1d" % load_time, msg1) ) msg0 = "Done loading contents" + for role1, config_file in _objects.items(): + fname, fext = os.path.splitext(config_file) if fext in (".yaml", ".yml"): load_from_yaml( @@ -204,22 +205,20 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): elif fext == ".xml": msg1 = "" time0 = time.time() + class_name1 = "None" try: hwobj = _instance.get_hardware_object(fname) if hwobj is None: msg1 = "No object loaded" - class_name1 = "None" else: class_name1 = hwobj.__class__.__name__ _attach_xml_objects(result, hwobj, role1) except Exception as ex: msg1 = "Loading error (%s)" % str(ex) - class_name = "" load_time = 1000 * (time.time() - time0) _table.append( (role1, class_name1, config_file, "%.1d" % load_time, msg1) ) - if not msg0: if _container: setattr(_container._config, role, result) @@ -227,6 +226,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): # Initialise object result.init() except Exception: + raise if _container: msg0 = "Error in %s.init()" % cls.__name__ else: From 93ed233cf23a862b72aba5ad0ba396f73a5c3161 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Thu, 23 May 2024 12:53:34 +0100 Subject: [PATCH 04/53] Fourth step in moving to compatible XML-YML transition status. ConfiguredObject now cleaned --- mxcubecore/HardwareObjects/Beamline.py | 644 ++---------------- .../HardwareObjects/EMBL/EMBLBeamline.py | 128 ---- .../EMBL/EMBLOnlineProcessing.py | 4 +- .../HardwareObjects/ESRF/ESRFMultiCollect.py | 6 +- .../HardwareObjects/Gphl/GphlWorkflow.py | 73 +- .../Gphl/GphlWorkflowConnection.py | 41 +- .../LNLS/LNLSDiffractometer.py | 2 +- .../HardwareObjects/QtGraphicsManager.py | 2 +- mxcubecore/HardwareObjects/UnitTest.py | 6 +- .../abstract/AbstractProcedure.py | 1 - .../abstract/AbstractXrayCentring.py | 4 +- .../mockup/MultiCollectMockup.py | 2 +- .../embl_hh_p14/beamline_config.yml | 2 +- mxcubecore/model/queue_model_objects.py | 10 +- test/pytest/test_procedure.py | 18 +- 15 files changed, 166 insertions(+), 777 deletions(-) delete mode 100644 mxcubecore/HardwareObjects/EMBL/EMBLBeamline.py diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index 98d384bc46..4c0850e018 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -59,93 +59,93 @@ class Beamline(ConfiguredObject): """Beamline class serving as singleton container for links to HardwareObjects""" - # Roles of defined objects and the category they belong to - # NB the double underscore is deliberate - attribute must be hidden from subclasses - __content_roles = [] - - # Names of procedures under Beamline - set of strings. 
- # NB subclasses must add additional procedures to this set, - # and may NOT override _procedure_names - _procedure_names = set() - - # NBNB these should be accessed ONLY as beamline.SUPPORTED_..._PARAMETERS - # NBNB Subclasses may add local parameters but may NOT remove any - # - # Supported acquisition parameter tags: - SUPPORTED_ACQ_PARAMETERS = frozenset( - ( - "exp_time", - "osc_range", - "num_passes", - "first_image", - "run_number", - "overlap", - "num_images", - "inverse_beam", - "take_dark_current", - "skip_existing_images", - "take_snapshots", - ) - ) - # Supported limit parameter tags: - SUPPORTED_LIMIT_PARAMETERS = frozenset( - ("exposure_time", "osc_range", "number_of_images", "kappa", "kappa_phi") - ) - - def __init__(self, name): - """ + class HOConfig(ConfiguredObject.HOConfig): - Args: - name (str) : Object name, generally set to the role name of the object - """ - super(Beamline, self).__init__(name) + # Properties - definition and default values # List[str] of advanced method names - self.advanced_methods = [] + advanced_methods = [] # List[str] of available methods - self.available_methods = [] + available_methods = [] # int number of clicks used for click centring - self.click_centring_num_clicks = 3 + click_centring_num_clicks = 3 # bool Is wavelength tunable - self.tunable_wavelength = False + tunable_wavelength = False # bool Disable number-of-passes widget NBNB TODO Move elsewhere?? - self.disable_num_passes = False - - # bool By default run processing of (certain?)data collections? - self.run_offline_processing = False + disable_num_passes = False # bool By default run online processing (characterization/mesh?) - self.run_online_processing = False + run_online_processing = False - self.offline_processing_methods = [] + offline_processing_methods = [] - self.online_processing_methods = [] + online_processing_methods = [] # Dictionary-of-dictionaries of default acquisition parameters - self.default_acquisition_parameters = {} + default_acquisition_parameters = {} # Dictionary of acquisition parameter limits - self.acquisition_limit_values = {} + acquisition_limit_values = {} # int Starting run number for path_template - self.run_number = 1 + run_number = 1 # List of undulators - self.undulators = [] + undulators = [] # Format of mesh result for display - self.mesh_result_format = "PNG" + mesh_result_format = "PNG" # bool Use the native mesh feature available, true by default - self.use_native_mesh = True + use_native_mesh = True # bool Enable features to work with points in the plane, called # 2D-points, (none centred positions) - self.enable_2d_points = True + enable_2d_points = True + + # Contained hardware objects + + machine_info = None + transmission = None + cryo = None + energy = None + flux = None + beam = None + hutch_interlock = None + safety_shutter = None + fast_shutter = None + diffractometer = None + detector = None + resolution = None + sample_changer = None + sample_changer_maintenance = None + plate_manipulator = None + session = None + lims = None + sample_view = None + queue_manager = None + queue_model = None + collect = None + xrf_spectrum = None + energy_scan = None + imaging = None + beamline_actions = None + xml_rpc_server = None + workflow = None + control = None + gphl_workflow = None + gphl_connection = None + xray_centring = None + online_processing = None + offline_processing = None + characterisation = None + image_tracking = None + mock_procedures = None + data_publisher = None def _init(self) -> None: """Object initialisation - 
executed *before* loading contents""" @@ -153,24 +153,7 @@ def _init(self) -> None: def init(self): """Object initialisation - executed *after* loading contents""" - # Validate acquisition parameters - for acquisition_type, params in self.default_acquisition_parameters.items(): - unrecognised = [x for x in params if x not in self.SUPPORTED_ACQ_PARAMETERS] - if unrecognised: - logging.getLogger("HWR").warning( - "Unrecognised acquisition parameters for %s: %s" - % (acquisition_type, unrecognised) - ) - # Validate limits parameters - unrecognised = [ - x - for x in self.acquisition_limit_values - if x not in self.SUPPORTED_LIMIT_PARAMETERS - ] - if unrecognised: - logging.getLogger("HWR").warning( - "Unrecognised parameter limits for: %s" % unrecognised - ) + pass def _hwr_init_done(self): """ @@ -216,509 +199,6 @@ def emit(self, signal: Union[str, object, Any], *args) -> None: "Signal %s is not connected" % signal ) - @property - def machine_info(self): - """Machine information Hardware object - - Returns: - Optional[AbstractMachineInfo]: - """ - return self._objects.get("machine_info") - - __content_roles.append("machine_info") - - @property - def authenticator(self): - """Authenticator Hardware object - - Returns: - Optional[AbstractAuthenticator]: - """ - return self._objects.get("authenticator") - - __content_roles.append("authenticator") - - @property - def transmission(self): - """Transmission Hardware object - - Returns: - Optional[AbstractTransmission]: - """ - return self._objects.get("transmission") - - __content_roles.append("transmission") - - @property - def cryo(self): - """Cryo Hardware object - - Returns: - Optional[AbstractActuator]: - """ - return self._objects.get("cryo") - - __content_roles.append("cryo") - - @property - def energy(self): - """Energy Hardware object - - Returns: - Optional[AbstractEnergy]: - """ - return self._objects.get("energy") - - __content_roles.append("energy") - - @property - def flux(self): - """Flux Hardware object - - Returns: - Optional[AbstractActuator]: - """ - return self._objects.get("flux") - - __content_roles.append("flux") - - @property - def beam(self): - """Beam Hardware object - - Returns: - Optional[AbstractBeam]: - """ - return self._objects.get("beam") - - __content_roles.append("beam") - - @property - def hutch_interlock(self): - """Hutch Interlock Hardware object - - Returns: - Optional[AbstractInterlock]: - """ - return self._objects.get("hutch_interlock") - - __content_roles.append("hutch_interlock") - - @property - def sample_environment(self): - """Sample Environment Hardware Object - - Returns: - Optional[AbstractSampleEnvironment]: - """ - return self._objects.get("sample_environment") - - __content_roles.append("sample_environment") - - @property - def safety_shutter(self): - """Safety Shutter Hardware object - - Returns: - Optional[AbstractShutter]: - """ - return self._objects.get("safety_shutter") - - __content_roles.append("safety_shutter") - - @property - def fast_shutter(self): - """Fast Shutter Hardware object - - Returns: - Optional[AbstractShutter]: - """ - return self._objects.get("fast_shutter") - - __content_roles.append("fast_shutter") - - @property - def diffractometer(self): - """Diffractometer Hardware object - - Returns: - Optional[AbstractDiffractometer]: - """ - return self._objects.get("diffractometer") - - __content_roles.append("diffractometer") - - @property - def detector(self): - """Detector Hardware object - - Returns: - Optional[AbstractDetector]: - """ - return self._objects.get("detector") - 
- __content_roles.append("detector") - - @property - def resolution(self): - """Resolution Hardware object - - Returns: - Optional[AbstractActuator]: - """ - return self._objects.get("resolution") - - __content_roles.append("resolution") - - @property - def sample_changer(self): - """Sample Changer Hardware object - can be a sample changer, plate_manipulator, jets, chips - - Returns: - Optional[AbstractSampleChanger]: - """ - return self._objects.get("sample_changer") - - __content_roles.append("sample_changer") - - @property - def sample_changer_maintenance(self): - """Sample Changer Maintnance Hardware object - - Returns: - Optional[AbstractMaintnanceSampleChanger]: - """ - return self._objects.get("sample_changer_maintenance") - - __content_roles.append("sample_changer_maintenance") - - @property - def harvester(self): - """Harvester Hardware object - can be a sample or plate holder - - Returns: - Optional[AbstractHarvester]: - """ - return self._objects.get("harvester") - - __content_roles.append("harvester") - - @property - def harvester_maintenance(self): - """harvester maintenance Hardware object - - Returns: - Optional[Harvester]: - """ - return self._objects.get("harvester_maintenance") - - __content_roles.append("harvester_maintenance") - - @property - def plate_manipulator(self): - """**DEPRECATED** - Plate Manipulator Hardware object - NBNB TODO REMOVE THIS From qt version usage and - and call HWR.beamline.sample_changer instead as plate_manipulator being - treated as an alternative sample changer. - - Returns: - Optional[AbstractSampleChanger]: - """ - return self._objects.get("plate_manipulator") - - __content_roles.append("plate_manipulator") - - @property - def session(self): - """Session Hardware object, holding information on current session and user. - - Returns: - Optional[Session]: - """ - return self._objects.get("session") - - __content_roles.append("session") - - @property - def lims(self): - """LIMS client object. - - Returns: - Optional[ISPyBClient]: - """ - return self._objects.get("lims") - - __content_roles.append("lims") - - @property - def sample_view(self): - """Sample view object. Includes defined shapes. - - Returns: - Optional[AbstractSampleView]: - """ - return self._objects.get("sample_view") - - __content_roles.append("sample_view") - - @property - def queue_manager(self): - """Queue manager object. - - Returns: - Optional[QueueManager]: - """ - return self._objects.get("queue_manager") - - __content_roles.append("queue_manager") - - @property - def queue_model(self): - """Queue model object. - - Returns: - Optional[QueueModel]: - """ - return self._objects.get("queue_model") - - __content_roles.append("queue_model") - - # Procedures - - @property - def collect(self): - """Data collection procedure. - - Returns: - Optional[AbstractCollect]: - """ - return self._objects.get("collect") - - __content_roles.append("collect") - - @property - def xrf_spectrum(self): - """X-ray fluorescence spectrum procedure. - - Returns: - Optional[AbstractProcedure] - """ - return self._objects.get("xrf_spectrum") - - __content_roles.append("xrf_spectrum") - - @property - def energy_scan(self): - """Energy scan procedure. - - Returns: - Optional[AbstractProcedure]: - """ - return self._objects.get("energy_scan") - - __content_roles.append("energy_scan") - - @property - def imaging(self): - """Imaging procedure. 
- - Returns: - Optional[AbstractProcedure]: - """ - return self._objects.get("imaging") - - __content_roles.append("imaging") - - @property - def beamline_actions(self): - """Beamline Actions - - Returns: - Optional[beamline_actions]: - """ - return self._objects.get("beamline_actions") - - __content_roles.append("beamline_actions") - - @property - def xml_rpc_server(self): - """XMLRPCServer for RPC - - Returns: - Optional[XMLRPCServer]: - """ - return self._objects.get("xml_rpc_server") - - __content_roles.append("xml_rpc_server") - - @property - def workflow(self): - """Standarad EDNA workflow procedure. - - Returns: - Optional[Workflow]: - """ - return self._objects.get("workflow") - - __content_roles.append("workflow") - - @property - def control(self): - """Beamline control system - - Returns: - Optional[Control]: - """ - return self._objects.get("control") - - __content_roles.append("control") - - @property - def gphl_workflow(self): - """Global phasing data collection workflow procedure. - - Returns: - Optional[GphlWorkflow]: - """ - return self._objects.get("gphl_workflow") - - __content_roles.append("gphl_workflow") - - # This one is 'hardware', but it is put with its companion - @property - def gphl_connection(self): - """Global PHasing workflow remote connection - - Returns: - Optional[GphlWorkflowConnection]: - """ - return self._objects.get("gphl_connection") - - __content_roles.append("gphl_connection") - - # centring - - # NB Could centring be treated as procedures instesad? - - @property - def centring(self): - """Centring procedures object. Includes X-ray, n-click, optical, move_to_beam - - Returns: - Optional[AbstractCentring]: - """ - return self._objects.get("centring") - - __content_roles.append("centring") - - @property - def xray_centring(self): - """Xray Ccntring hardware object. - - Returns: - Optional[XrayCentring2]: - """ - return self._objects.get("xray_centring") - - __content_roles.append("xray_centring") - - # Analysis (combines processing and data analysis) - - @property - def online_processing(self): - """Synchronous (on-line) data processing procedure. - - Returns: - Optional[AbstractProcessing]: - """ - return self._objects.get("online_processing") - - __content_roles.append("online_processing") - - @property - def offline_processing(self): - """Asynchronous (queue sumbission) data processing procedure. - - Returns: - Optional[AbstractProcessing]: - """ - return self._objects.get("offline_processing") - - __content_roles.append("offline_processing") - - @property - def characterisation(self): - """EDNA characterisation and analysis procedure. - - NB the current code looks rather EDNA-specific - to be called 'AbsatractCharacterisation'. - Potentially we could generalise it, and maybe make it into a procedure??? 
- - Returns: - Optional[EdnaCharacterisation]: - """ - return self._objects.get("characterisation") - - __content_roles.append("characterisation") - - @property - def beam_realign(self): - """Beam-realign procedure object - - Returns: - Optional[AbstractProcedure]: - """ - return self._objects.get("beam_realign") - - __content_roles.append("beam_realign") - - @property - def image_tracking(self): - """Imaging tracking object - - Returns: - Optional[HardwareObject]: - """ - return self._objects.get("image_tracking") - - __content_roles.append("image_tracking") - - # Procedures - - @property - def mock_procedure(self): - """ """ - return self._objects.get("mock_procedure") - - __content_roles.append("mock_procedure") - - @property - def data_publisher(self): - """ """ - return self._objects.get("data_publisher") - - __content_roles.append("data_publisher") - - # NB this is just an example of a globally shared procedure description - @property - def manual_centring(self): - """Manual centring Procedure - - NB AbstractManualCentring serves to define the parameters for manual centring - The actual implementation is set by configuration, - and can be given as an AbstractManualCentring subclass on each beamline - - Returns: - Optional[AbstractManualCentring] - """ - return self._objects.get("manual_centring") - - __content_roles.append("manual_centring") - # Registers this object as a procedure: - _procedure_names.add("manual_centring") # Additional functions @@ -735,9 +215,11 @@ def get_default_acquisition_parameters(self, acquisition_type="default"): acq_parameters = queue_model_objects.AcquisitionParameters() - params = self.default_acquisition_parameters["default"].copy() + print ('@~@~ default_acquisition_parameters', self.config.default_acquisition_parameters) + + params = self.config.default_acquisition_parameters["default"].copy() if acquisition_type != "default": - dd0 = self.default_acquisition_parameters.get(acquisition_type) + dd0 = self.config.default_acquisition_parameters.get(acquisition_type) if dd0 is None: logging.getLogger("HWR").warning( "No separate parameters for acquisition type: %s - using default." @@ -831,7 +313,7 @@ def get_default_path_template(self): path_template.start_num = acq_params.first_image path_template.num_files = acq_params.num_images - path_template.run_number = self.run_number + path_template.run_number = self.config.run_number return path_template diff --git a/mxcubecore/HardwareObjects/EMBL/EMBLBeamline.py b/mxcubecore/HardwareObjects/EMBL/EMBLBeamline.py deleted file mode 100644 index 5ae39a0c51..0000000000 --- a/mxcubecore/HardwareObjects/EMBL/EMBLBeamline.py +++ /dev/null @@ -1,128 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# -# Project: MXCuBE -# https://github.com/mxcube -# -# This file is part of MXCuBE software. -# -# MXCuBE is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# MXCuBE is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with MXCuBE. If not, see . 
- -"""Beamline class serving as singleton container for links to top-level HardwareObjects - -All HardwareObjects -""" - -from __future__ import ( - absolute_import, - division, - print_function, - unicode_literals, -) - -__copyright__ = """ Copyright © 2019 by the MXCuBE collaboration """ -__license__ = "LGPLv3+" -__author__ = "Rasmus H Fogh" - -from collections import OrderedDict - -from mxcubecore.HardwareObjects.Beamline import Beamline - - -class EMBLBeamline(Beamline): - """Beamline class serving as singleton container for links to HardwareObjects""" - - # Roles of defined objects and the category they belong to - # NB the double underscore is deliberate - attribute must be hidden from subclasses - __content_roles = [] - - def __init__(self, name): - """ - - Args: - name (str) : Object name, generally saet to teh role name of the object - """ - super(EMBLBeamline, self).__init__(name) - - # NB this function must be re-implemented in nested subclasses - @property - def all_roles(self): - """Tuple of all content object roles, indefinition and loading order - - Returns: - tuple[text_str, ...] - """ - return super(EMBLBeamline, self).all_roles + tuple(self.__content_roles) - - # Additional properties - - @property - def ppu_control(self): - """PPU control Hardware object - - Returns: - Optional[HardwareObject]: - """ - return self._objects.get("ppu_control") - - __content_roles.append("ppu_control") - - - @property - def front_light(self): - """Diffractometer front light - - Returns: - Optional[HardwareObject]: - """ - return self._objects.get("front_light") - - __content_roles.append("front_light") - - - @property - def back_light(self): - """Diffractometer back light - - Returns: - Optional[HardwareObject]: - """ - return self._objects.get("back_light") - - __content_roles.append("back_light") - - @property - def beam_centering(self): - return self._objects.get("beam_centering") - - __content_roles.append("beam_centering") - - - # Additional procedures - - # NB this is just an example of a beamline-specific procedure description - @property - def xray_centring(self): - """ X-ray Centring Procedure - - NB EMBLXrayCentring is defined in EMBL-specific code, like EMBLBeamline - - Returns: - Optional[EMBLXrayCentring] - """ - return self._objects.get("xray_centring") - - __content_roles.append("xray_centring") - # Registers this object as a procedure: - Beamline._procedure_names.add("xray_centring") diff --git a/mxcubecore/HardwareObjects/EMBL/EMBLOnlineProcessing.py b/mxcubecore/HardwareObjects/EMBL/EMBLOnlineProcessing.py index 41aedd912b..7f88f9ced6 100644 --- a/mxcubecore/HardwareObjects/EMBL/EMBLOnlineProcessing.py +++ b/mxcubecore/HardwareObjects/EMBL/EMBLOnlineProcessing.py @@ -492,7 +492,7 @@ def finish_processing(self): self.all_frames_dozor_is = False self.all_frames_batch_processed = False self.is_count = 0 - self.batch_count = 0 + self.batch_count = 0 def dozor_average_i_changed(self, average_i_value): if self.started: @@ -730,7 +730,7 @@ def start_crystfel_autoproc_launchjob(self, proc_params, all_file_filename): acq_params = self.data_collection.acquisitions[0].acquisition_parameters sample_basename = self.params_dict["template"].split("/")[-1].split("_%d_%0")[0] - # stream_filename = sample_basename + "_crystfel_xgandalf.stream" + # stream_filename = sample_basename + "_crystfel_xgandalf.stream" stream_filename = sample_basename + cell_name_tag + "_crystfel.stream" geom_filename = "crystfel_detector.geom" cell_filename = "crystfel_cell" + cell_name_tag + ".cell" diff --git 
a/mxcubecore/HardwareObjects/ESRF/ESRFMultiCollect.py b/mxcubecore/HardwareObjects/ESRF/ESRFMultiCollect.py index e1fa0da575..e00222444a 100644 --- a/mxcubecore/HardwareObjects/ESRF/ESRFMultiCollect.py +++ b/mxcubecore/HardwareObjects/ESRF/ESRFMultiCollect.py @@ -170,13 +170,13 @@ def oscil(self, start, end, exptime, number_of_images, wait=False): return self.execute_command("do_oscillation", start, end, exptime, number_of_images) def set_wavelength(self, wavelength): - if HWR.beamline.tunable_wavelength: + if HWR.beamline.config.tunable_wavelength: return HWR.beamline.energy.set_wavelength(wavelength) else: return def set_energy(self, energy): - if HWR.beamline.tunable_wavelength: + if HWR.beamline.config.tunable_wavelength: return HWR.beamline.energy.set_value(energy) else: return @@ -457,7 +457,7 @@ def get_undulators_gaps(self): _gaps = {} try: - _gaps = HWR.beamline.undulators + _gaps = HWR.beamline.config.undulators except Exception: logging.getLogger("HWR").exception("Could not get undulator gaps") all_gaps.clear() diff --git a/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py b/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py index a742057300..e392295b68 100644 --- a/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py +++ b/mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py @@ -46,7 +46,10 @@ import gevent.queue from mxcubecore import HardwareRepository as HWR -from mxcubecore.BaseHardwareObjects import HardwareObjectYaml +from mxcubecore.BaseHardwareObjects import ( + ConfiguredObject, + HardwareObject, +) from mxcubecore.dispatcher import dispatcher from mxcubecore.HardwareObjects.Gphl import GphlMessages from mxcubecore.model import ( @@ -162,7 +165,7 @@ class GphlWorkflowStates(enum.Enum): alternative_lattices[atag] = list0 -class GphlWorkflow(HardwareObjectYaml): +class GphlWorkflow(HardwareObject): """Global Phasing workflow runner.""" SPECIFIC_STATES = GphlWorkflowStates @@ -176,6 +179,15 @@ class GphlWorkflow(HardwareObjectYaml): PARAMETERS_READY = "PARAMETERS_READY" PARAMETERS_CANCELLED = "PARAMETERS_CANCELLED" + class HOConfig(ConfiguredObject.HOConfig): + """Temporary replacement for Pydantic class + + Required during transition, as long as we do nto have teh fields defined""" + + # Defaults - should be replaced by proper Pydantic + workflows = {} + settings = {} + def __init__(self, name): super().__init__(name) @@ -183,9 +195,6 @@ def __init__(self, name): # And as a place to get hold of other objects self._queue_entry = None - # Configuration data - set on load - self.settings = self.get_property("settings", {}) - # auxiliary data structure from configuration. 
Set in init self.workflow_strategies = OrderedDict() @@ -266,7 +275,7 @@ def init(self): self.translation_axis_roles = instrument_data["gonio_centring_axis_names"] # Adapt configuration data - must be done after file_paths setting - if HWR.beamline.gphl_connection.ssh_options: + if HWR.beamline.gphl_connection.config.ssh_options: # We are running workflow through ssh - set beamline url beamline_hook = "py4j:%s:" % socket.gethostname() else: @@ -450,7 +459,7 @@ def query_pre_strategy_params(self, choose_lattice=None): fields["use_cell_for_processing"] = { "title": "Use for indexing", "type": "boolean", - "default": self.settings["defaults"]["use_cell_for_processing"], + "default": self.config.settings["defaults"]["use_cell_for_processing"], } resolution = data_model.aimed_resolution or HWR.beamline.resolution.get_value() resolution = round(resolution, resolution_decimals) @@ -492,10 +501,10 @@ def query_pre_strategy_params(self, choose_lattice=None): if ll0: energy_tag = ll0[0] else: - energy_tag = self.settings["default_beam_energy_tag"] + energy_tag = self.config.settings["default_beam_energy_tag"] else: # Characterisation - strategies = self.settings["characterisation_strategies"] + strategies = self.config.settings["characterisation_strategies"] fields["strategy"]["default"] = strategies[0] fields["strategy"]["title"] = "Characterisation strategy" fields["strategy"]["enum"] = strategies @@ -760,7 +769,7 @@ def pre_execute(self, queue_entry): if params is StopIteration: self.workflow_aborted() return - use_preset_spotdir = self.settings.get("use_preset_spotdir") + use_preset_spotdir = self.config.settings.get("use_preset_spotdir") if use_preset_spotdir: spotdir = self.get_emulation_sample_dir() if spotdir: @@ -966,7 +975,7 @@ def query_collection_strategy(self, geometric_strategy): sweep_group_counts[sweep.sweepGroup] = count energy_tags = strategy_settings.get("beam_energy_tags") or ( - self.settings["default_beam_energy_tag"], + self.config.settings["default_beam_energy_tag"], ) # NBNB HACK - this needs to eb done properly # Used for determining whether to query wedge width @@ -1016,7 +1025,7 @@ def query_collection_strategy(self, geometric_strategy): lines = ["Experiment length: %6.1f°" % data_model.strategy_length] beam_energies = OrderedDict((("Characterisation", initial_energy),)) dose_label = "Characterisation dose (MGy)" - if not self.settings.get("recentre_before_start"): + if not self.config.settings.get("recentre_before_start"): # replace planned orientation with current orientation current_pos_dict = HWR.beamline.diffractometer.get_positions() dd0 = list(axis_setting_dicts.values())[0] @@ -1051,7 +1060,7 @@ def query_collection_strategy(self, geometric_strategy): # Set up image width pulldown allowed_widths = geometric_strategy.allowedWidths if not allowed_widths: - allowed_widths = list(self.settings.get("default_image_widths")) + allowed_widths = list(self.config.settings.get("default_image_widths")) allowed_widths.sort() logging.getLogger("HWR").info( "No allowed image widths returned by strategy - use defaults" @@ -1177,7 +1186,7 @@ def query_collection_strategy(self, geometric_strategy): fields["wedge_width"] = { "title": "Wedge width (°)", "type": "number", - "default": self.settings.get("default_wedge_width", 15), + "default": self.config.settings.get("default_wedge_width", 15), "minimum": 0.1, "maximum": 7200, } @@ -1204,7 +1213,9 @@ def query_collection_strategy(self, geometric_strategy): # recentring mode: labels = list(RECENTRING_MODES.keys()) modes = 
list(RECENTRING_MODES.values()) - default_recentring_mode = self.settings.get("default_recentring_mode", "sweep") + default_recentring_mode = self.config.settings.get( + "default_recentring_mode", "sweep" + ) if default_recentring_mode == "scan" or default_recentring_mode not in modes: raise ValueError( "invalid default recentring mode '%s' " % default_recentring_mode @@ -1355,7 +1366,9 @@ def query_collection_strategy(self, geometric_strategy): if value: image_width = float(value) else: - image_width = self.settings.get("default_image_width", default_image_width) + image_width = self.config.settings.get( + "default_image_width", default_image_width + ) result[tag] = image_width # exposure_time OK as is tag = "repetition_count" @@ -1420,7 +1433,9 @@ def setup_data_collection(self, payload, correlation_id): allowed_widths[geometric_strategy.defaultWidthIdx or 0] ) else: - default_image_width = list(self.settings.get("default_image_widths"))[0] + default_image_width = list( + self.config.settings.get("default_image_widths") + )[0] # get parameters and initial transmission/use_dose if gphl_workflow_model.automation_mode: @@ -1608,7 +1623,7 @@ def setup_data_collection(self, payload, correlation_id): # Check if sample is currently centred, and centre first sweep if not if ( - self.settings.get("recentre_before_start") + self.config.settings.get("recentre_before_start") and not gphl_workflow_model.characterisation_done ): # Sample has never been centred reliably. @@ -1639,7 +1654,7 @@ def setup_data_collection(self, payload, correlation_id): # Get translation setting from recentring or current (MAY be used) if has_recentring_file: # calculate first sweep recentring from okp - tol = self.settings.get("angular_tolerance", 1.0) + tol = self.config.settings.get("angular_tolerance", 1.0) translation_settings = self.calculate_recentring( okp, ref_xyz=current_xyz, ref_okp=current_okp ) @@ -1860,7 +1875,7 @@ def calculate_recentring(self, okp, ref_okp, ref_xyz): def collect_data(self, payload, correlation_id): collection_proposal = payload - angular_tolerance = float(self.settings.get("angular_tolerance", 1.0)) + angular_tolerance = float(self.config.settings.get("angular_tolerance", 1.0)) queue_manager = self._queue_entry.get_queue_controller() gphl_workflow_model = self._queue_entry.get_data_model() @@ -1890,7 +1905,11 @@ def collect_data(self, payload, correlation_id): sweep_offset = geometric_strategy.sweepOffset scan_count = len(scans) - if repeat_count and sweep_offset and self.settings.get("use_multitrigger"): + if ( + repeat_count + and sweep_offset + and self.config.settings.get("use_multitrigger") + ): # commpress unrolled multi-trigger sweep # NBNB as of 202103 this is only allowed for a single sweep # @@ -2072,7 +2091,11 @@ def collect_data(self, payload, correlation_id): acq_parameters.take_snapshots = snapshot_count gphl_workflow_model.current_rotation_id = rotation_id - if repeat_count and sweep_offset and self.settings.get("use_multitrigger"): + if ( + repeat_count + and sweep_offset + and self.config.settings.get("use_multitrigger") + ): # Multitrigger sweep - add in parameters. 
# NB if we are here ther can be only one scan acq_parameters.num_triggers = scan_count @@ -2609,8 +2632,10 @@ def resolution2dose_budget( assuming an increase in B factor of 1A^2/MGy """ - max_budget = maximum_dose_budget or self.settings.get("maximum_dose_budget", 20) - decay_limit = decay_limit or self.settings.get("decay_limit", 25) + max_budget = maximum_dose_budget or self.config.settings.get( + "maximum_dose_budget", 20 + ) + decay_limit = decay_limit or self.config.settings.get("decay_limit", 25) result = 2 * resolution * resolution * math.log(100.0 / decay_limit) # return min(result, max_budget) / relative_rad_sensitivity diff --git a/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py b/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py index 08a156fcb2..aaf8bbd535 100644 --- a/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py +++ b/mxcubecore/HardwareObjects/Gphl/GphlWorkflowConnection.py @@ -43,7 +43,10 @@ from py4j.protocol import Py4JJavaError from mxcubecore import HardwareRepository as HWR -from mxcubecore.BaseHardwareObjects import HardwareObjectYaml +from mxcubecore.BaseHardwareObjects import ( + ConfiguredObject, + HardwareObject, +) from mxcubecore.HardwareObjects.Gphl import GphlMessages from mxcubecore.utils import conversion @@ -87,11 +90,25 @@ __author__ = "Rasmus H Fogh" -class GphlWorkflowConnection(HardwareObjectYaml): +class GphlWorkflowConnection(HardwareObject): """ This HO acts as a gateway to the Global Phasing workflow engine. """ + class HOConfig(ConfiguredObject.HOConfig): + """Temporary replacement for Pydantic class + + Required during transition, as long as we do nto have teh fields defined""" + + # Defaults - should be replaced by proper Pydantic + software_paths = {} + software_properties = {} + directory_locations = {} + gphl_subdir = "GPHL" + gphl_persistname = "persistence" + ssh_options = {} + connection_parameters = {} + def __init__(self, name): super().__init__(name) # Py4J gateway to external workflow program @@ -107,20 +124,14 @@ def __init__(self, name): self._running_process = None self.collect_emulator_process = None - # Configured parameters - self.ssh_options = self.get_property("ssh_options", {}) - self.gphl_subdir = self.get_property("gphl_subdir", "GPHL") - self.gphl_persistname = self.get_property("gphl_persistname", "persistence") - self.connection_parameters = self.get_property("connection_parameters", {}) - self.update_state(self.STATES.UNKNOWN) def init(self): super().init() # Adapt connections if we are running via ssh - if self.ssh_options: - self.connection_parameters["python_address"] = socket.gethostname() + if self.config.ssh_options: + self.config.connection_parameters["python_address"] = socket.gethostname() # Adapt paths and properties to use directory_locations locations = self.config.directory_locations @@ -187,7 +198,7 @@ def open_connection(self): else: return - params = self.connection_parameters + params = self.config.connection_parameters python_parameters = {} val = params.get("python_address") @@ -235,13 +246,13 @@ def start_workflow(self, workflow_queue, workflow_model_obj): # Cannot be done in init, where the api.sessions link is not yet ready self.config.software_paths["GPHL_WDIR"] = os.path.join( - HWR.beamline.session.get_base_process_directory(), self.gphl_subdir + HWR.beamline.session.get_base_process_directory(), self.config.gphl_subdir ) strategy_settings = workflow_model_obj.strategy_settings - wf_settings = HWR.beamline.gphl_workflow.settings + wf_settings = 
HWR.beamline.gphl_workflow.config.settings - ssh_options = self.ssh_options + ssh_options = self.config.ssh_options in_shell = bool(ssh_options) if in_shell: ssh_options = ssh_options.copy() @@ -291,7 +302,7 @@ def start_workflow(self, workflow_queue, workflow_model_obj): if "prefix" in workflow_options: workflow_options["prefix"] = path_template.base_prefix workflow_options["wdir"] = self.config.software_paths["GPHL_WDIR"] - workflow_options["persistname"] = self.gphl_persistname + workflow_options["persistname"] = self.config.gphl_persistname # Set the workflow root subdirectory parameter from the base image directory image_root = os.path.abspath(HWR.beamline.session.get_base_image_directory()) diff --git a/mxcubecore/HardwareObjects/LNLS/LNLSDiffractometer.py b/mxcubecore/HardwareObjects/LNLS/LNLSDiffractometer.py index 184836301c..d7afead56a 100644 --- a/mxcubecore/HardwareObjects/LNLS/LNLSDiffractometer.py +++ b/mxcubecore/HardwareObjects/LNLS/LNLSDiffractometer.py @@ -358,7 +358,7 @@ def calculate_move_to_beam_pos(self, x, y): if y <= (- x + 1152): dir_goniox = 1 else: - dir_goniox = -1 + dir_goniox = -1 move_goniox = dir_goniox * drx_goniox # mm to move move_goniox = move_goniox / self.pixels_per_mm_x diff --git a/mxcubecore/HardwareObjects/QtGraphicsManager.py b/mxcubecore/HardwareObjects/QtGraphicsManager.py index 01463a05de..4a9801d5a4 100644 --- a/mxcubecore/HardwareObjects/QtGraphicsManager.py +++ b/mxcubecore/HardwareObjects/QtGraphicsManager.py @@ -215,7 +215,7 @@ def init(self): self.graphics_view.keyPressedSignal.connect(self.key_pressed) self.graphics_view.wheelSignal.connect(self.mouse_wheel_scrolled) - self.diffractometer_hwobj = self.get_object_by_role("diffractometer") + self.diffractometer_hwobj = HWR.beamline.diffractometer self.graphics_view.resizeEvent = self.resizeEvent if self.diffractometer_hwobj is not None: diff --git a/mxcubecore/HardwareObjects/UnitTest.py b/mxcubecore/HardwareObjects/UnitTest.py index 8c20abd91c..2c797c075d 100644 --- a/mxcubecore/HardwareObjects/UnitTest.py +++ b/mxcubecore/HardwareObjects/UnitTest.py @@ -55,17 +55,17 @@ def test_get_value(self): logging.getLogger("HWR").debug("UnitTest: Testing aperture hwobj") self.assertIn( - type(BEAMLINE.beam.aperture.get_diameter_size()), + type(beamline.beam.aperture.get_diameter_size()), (float, int), "Aperture | get_diameter_size() returns float", ) self.assertIn( - type(BEAMLINE.beam.aperture.get_diameter_size_list()), + type(beamline.beam.aperture.get_diameter_size_list()), (list, tuple), "Aperture | get_diameter_size_list() returns list or tuple", ) self.assertIn( - type(BEAMLINE.beam.aperture.get_position_list()), + type(beamline.beam.aperture.get_position_list()), (list, tuple), "Aperture | get_position_list() returns list or tuple", ) diff --git a/mxcubecore/HardwareObjects/abstract/AbstractProcedure.py b/mxcubecore/HardwareObjects/abstract/AbstractProcedure.py index 6db71712ef..fe46c3a594 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractProcedure.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractProcedure.py @@ -55,7 +55,6 @@ class ProcedureState(IntEnum): class AbstractProcedure(ConfiguredObject): - __content_roles = [] _ARGS_CLASS = () _KWARGS_CLASS = {} diff --git a/mxcubecore/HardwareObjects/abstract/AbstractXrayCentring.py b/mxcubecore/HardwareObjects/abstract/AbstractXrayCentring.py index ac484c5652..444d2407ab 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractXrayCentring.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractXrayCentring.py @@ -33,11 +33,11 @@ import 
abc from mxcubecore import HardwareRepository as HWR -from mxcubecore.BaseHardwareObjects import HardwareObjectYaml +from mxcubecore.BaseHardwareObjects import HardwareObject from mxcubecore.model import queue_model_objects -class AbstractXrayCentring(HardwareObjectYaml): +class AbstractXrayCentring(HardwareObject): """Xray Centring Hardware Object. Set to Yaml configuration.""" def __init__(self, name): diff --git a/mxcubecore/HardwareObjects/mockup/MultiCollectMockup.py b/mxcubecore/HardwareObjects/mockup/MultiCollectMockup.py index 09a79be803..be287cabf3 100644 --- a/mxcubecore/HardwareObjects/mockup/MultiCollectMockup.py +++ b/mxcubecore/HardwareObjects/mockup/MultiCollectMockup.py @@ -35,7 +35,7 @@ def init(self): resolution=HWR.beamline.resolution, detector_distance=HWR.beamline.detector.distance, transmission=HWR.beamline.transmission, - undulators=HWR.beamline.undulators, + undulators=HWR.beamline.config.undulators, flux=HWR.beamline.flux, detector=HWR.beamline.detector, beam_info=HWR.beamline.beam, diff --git a/mxcubecore/configuration/embl_hh_p14/beamline_config.yml b/mxcubecore/configuration/embl_hh_p14/beamline_config.yml index 12607d3155..3a15310f8f 100644 --- a/mxcubecore/configuration/embl_hh_p14/beamline_config.yml +++ b/mxcubecore/configuration/embl_hh_p14/beamline_config.yml @@ -5,7 +5,7 @@ # The class to initialise, and init parameters _initialise_class: - class: mxcubecore.HardwareObjects.EMBL.EMBLBeamline.EMBLBeamline + class: mxcubecore.HardwareObjects.Beamline.Beamline # Further key-value pairs here will be passed to the class init # mode: devel diff --git a/mxcubecore/model/queue_model_objects.py b/mxcubecore/model/queue_model_objects.py index 952c68fbd3..4b4c981aa2 100644 --- a/mxcubecore/model/queue_model_objects.py +++ b/mxcubecore/model/queue_model_objects.py @@ -2039,7 +2039,7 @@ def __init__(self): # # Centring handling and MXCuBE-side flow self.set_requires_centring(False) - self.set_from_dict(workflow_hwobj.settings["defaults"]) + self.set_from_dict(workflow_hwobj.config.settings["defaults"]) # Set missing values from BL defaults and limits. # NB cannot be done till after all HO are initialised. @@ -2152,7 +2152,7 @@ def set_pre_strategy_params( # noqa: C901 self.interleave_order = interleave_order # NB this is an internal dictionary. DO NOT MODIFY - settings = HWR.beamline.gphl_workflow.settings + settings = HWR.beamline.gphl_workflow.config.settings if energies: # Energies *reset* existing list, and there must be at least one @@ -2271,7 +2271,7 @@ def set_pre_acquisition_params( from mxcubecore.HardwareObjects.Gphl import GphlMessages # NB this is an internal dictionary. 
DO NOT MODIFY - settings = HWR.beamline.gphl_workflow.settings + settings = HWR.beamline.gphl_workflow.config.settings if exposure_time: self.exposure_time = float(exposure_time) @@ -2354,7 +2354,7 @@ def init_from_task_data(self, sample_model, params): if dd1: self.workflow_parameters.update(dd1) - settings = HWR.beamline.gphl_workflow.settings + settings = HWR.beamline.gphl_workflow.config.settings # NB settings is an internal attribute DO NOT MODIFY # Auto acquisition parameters @@ -2419,7 +2419,7 @@ def init_from_task_data(self, sample_model, params): # Set to current wavelength for now - nothing else available wavelength = HWR.beamline.energy.get_wavelength() - role = HWR.beamline.gphl_workflow.settings["default_beam_energy_tag"] + role = HWR.beamline.gphl_workflow.config.settings["default_beam_energy_tag"] self.wavelengths = ( GphlMessages.PhasingWavelength(wavelength=wavelength, role=role), ) diff --git a/test/pytest/test_procedure.py b/test/pytest/test_procedure.py index da076299eb..1bf7df1de6 100644 --- a/test/pytest/test_procedure.py +++ b/test/pytest/test_procedure.py @@ -6,7 +6,7 @@ def test_procedure_init(beamline): assert ( - beamline.mock_procedure is not None + beamline.config.mock_procedure is not None ), "MockProcedure hardware objects is None (not initialized)" # The methods are defined with abc.abstractmethod which will raise # an exception if the method is not defined. So there is no need to test for @@ -15,17 +15,17 @@ def test_procedure_init(beamline): def test_procedure_start(beamline): data = procedure_model.MockDataModel(**{"exposure_time": 5}) - beamline.mock_procedure.start(data) + beamline.config.mock_procedure.start(data) gevent.sleep(1) - assert beamline.mock_procedure.state == ProcedureState.BUSY - beamline.mock_procedure.wait() - assert beamline.mock_procedure.state == ProcedureState.READY + assert beamline.config.mock_procedure.state == ProcedureState.BUSY + beamline.config.mock_procedure.wait() + assert beamline.config.mock_procedure.state == ProcedureState.READY def test_procedure_stop(beamline): data = procedure_model.MockDataModel(**{"exposure_time": 5}) - beamline.mock_procedure.start(data) + beamline.config.mock_procedure.start(data) gevent.sleep(1) - assert beamline.mock_procedure.state == ProcedureState.BUSY - beamline.mock_procedure.stop() - assert beamline.mock_procedure.state == ProcedureState.READY + assert beamline.config.mock_procedure.state == ProcedureState.BUSY + beamline.config.mock_procedure.stop() + assert beamline.config.mock_procedure.state == ProcedureState.READY From e3a050e7324149158f8b2383b8333fa32d8325f9 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Thu, 23 May 2024 19:42:05 +0100 Subject: [PATCH 05/53] Branch now runs in mock mode. 
Added export of yml config files --- mxcubecore/HardwareObjects/Beamline.py | 2 -- mxcubecore/HardwareRepository.py | 36 +++++++++++++++++-- .../alba_xaloc13/Qt4_graphics-manager.xml | 1 - .../desy_p11/Qt4_graphics-manager.xml | 5 ++- .../configuration/embl_hh_p13/sample-view.xml | 1 - .../configuration/embl_hh_p14/sample-view.xml | 3 +- .../configuration/lnls_manaca/sample_view.xml | 1 - .../configuration/lnls_sol/sample_view.xml | 1 - .../configuration/mockup/qt/sample-view.xml | 5 ++- .../mockup/test/sample-view-mockup.xml | 3 +- .../configuration/mockup/web/sample_view.xml | 1 - .../soleil_px1/Qt4_graphics-manager.xml | 1 - .../soleil_px2/beamline-setup.xml | 5 ++- 13 files changed, 42 insertions(+), 23 deletions(-) diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index 4c0850e018..f5f69d7786 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -215,8 +215,6 @@ def get_default_acquisition_parameters(self, acquisition_type="default"): acq_parameters = queue_model_objects.AcquisitionParameters() - print ('@~@~ default_acquisition_parameters', self.config.default_acquisition_parameters) - params = self.config.default_acquisition_parameters["default"].copy() if acquisition_type != "default": dd0 = self.config.default_acquisition_parameters.get(acquisition_type) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 56b48b9394..6db0cb7b5b 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -78,6 +78,12 @@ BEAMLINE_CONFIG_FILE = "beamline_config.yml" +# Temporary hack to export yaml config file verions after loading +# Set to an existing directory to trigger output of yaml config files +# EXPORT_CONFIG_DIR = "/home/rhfogh/pycharm/mock_config_dirs_tmp" +EXPORT_CONFIG_DIR = None + + def load_from_yaml(configuration_file, role, _container=None, _table=None): """ @@ -238,9 +244,28 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if _container is None: print(make_table(column_names, _table)) + elif EXPORT_CONFIG_DIR: + # temporary hack + _export_draft_config_file(result) # return result +def _export_draft_config_file(hwobj): + result = { + "_initialise_class" : { + "class": "%s.%s" % (hwobj.__class__.__module__, hwobj.__class__.__name__), + }, + } + objects_by_role = hwobj.objects_by_role + if objects_by_role: + objects = result["_objects"] = {} + for role, obj in objects_by_role.items(): + objects[role] = "%s.yml" % obj.id + for tag, val in hwobj.config.model_dump().items(): + if tag not in objects_by_role: + result[tag] = val + fp = open(os.path.join(EXPORT_CONFIG_DIR, "%s.yml" % hwobj.id), "w") + yaml.dump(result, fp) def _attach_xml_objects(container, hwobj, role): """Recursively attach XML-configured object to container as role @@ -257,8 +282,15 @@ def _attach_xml_objects(container, hwobj, role): for tag in hwobj._objects_names(): if tag not in objects_by_role: # Complex object, not contained hwobj - objs = list(hwobj._get_objects(tag)) - setattr(hwobj.config, tag, list(_convert_xml_property(obj) for obj in objs)) + objs = list(_convert_xml_property(obj) for obj in hwobj._get_objects(tag)) + if len(objs) == 1: + setattr(hwobj.config, tag, objs[0]) + else: + setattr(hwobj.config, tag, objs) + # + if EXPORT_CONFIG_DIR: + # temporary hack + _export_draft_config_file(hwobj) def _convert_xml_property(hwobj): diff --git a/mxcubecore/configuration/alba_xaloc13/Qt4_graphics-manager.xml 
b/mxcubecore/configuration/alba_xaloc13/Qt4_graphics-manager.xml index 4676f5b052..b6f2a94fd5 100755 --- a/mxcubecore/configuration/alba_xaloc13/Qt4_graphics-manager.xml +++ b/mxcubecore/configuration/alba_xaloc13/Qt4_graphics-manager.xml @@ -1,5 +1,4 @@ - diff --git a/mxcubecore/configuration/desy_p11/Qt4_graphics-manager.xml b/mxcubecore/configuration/desy_p11/Qt4_graphics-manager.xml index 65284a3511..f463de99e3 100644 --- a/mxcubecore/configuration/desy_p11/Qt4_graphics-manager.xml +++ b/mxcubecore/configuration/desy_p11/Qt4_graphics-manager.xml @@ -1,9 +1,8 @@ - - @@ -12,7 +11,7 @@ - (1, 1.5, 2, 2.5, 3, 4, 5) + (1, 1.5, 2, 2.5, 3, 4, 5) diff --git a/mxcubecore/configuration/embl_hh_p13/sample-view.xml b/mxcubecore/configuration/embl_hh_p13/sample-view.xml index a4b0fb6f5d..36c1aa2b5a 100644 --- a/mxcubecore/configuration/embl_hh_p13/sample-view.xml +++ b/mxcubecore/configuration/embl_hh_p13/sample-view.xml @@ -1,6 +1,5 @@ - (1, 1.5, 2.) True (0.6, 0.6) diff --git a/mxcubecore/configuration/embl_hh_p14/sample-view.xml b/mxcubecore/configuration/embl_hh_p14/sample-view.xml index fde377e2c2..6a0ece1dfd 100644 --- a/mxcubecore/configuration/embl_hh_p14/sample-view.xml +++ b/mxcubecore/configuration/embl_hh_p14/sample-view.xml @@ -1,6 +1,5 @@ - - + [0.5, 1, 1.5, 2] (0.2, 0.2) diff --git a/mxcubecore/configuration/lnls_manaca/sample_view.xml b/mxcubecore/configuration/lnls_manaca/sample_view.xml index 38c006fb91..d1698ed38b 100644 --- a/mxcubecore/configuration/lnls_manaca/sample_view.xml +++ b/mxcubecore/configuration/lnls_manaca/sample_view.xml @@ -5,7 +5,6 @@ - 5 diff --git a/mxcubecore/configuration/lnls_sol/sample_view.xml b/mxcubecore/configuration/lnls_sol/sample_view.xml index 38c006fb91..d1698ed38b 100644 --- a/mxcubecore/configuration/lnls_sol/sample_view.xml +++ b/mxcubecore/configuration/lnls_sol/sample_view.xml @@ -5,7 +5,6 @@ - 5 diff --git a/mxcubecore/configuration/mockup/qt/sample-view.xml b/mxcubecore/configuration/mockup/qt/sample-view.xml index 4a5c192516..66f9c8e6a6 100644 --- a/mxcubecore/configuration/mockup/qt/sample-view.xml +++ b/mxcubecore/configuration/mockup/qt/sample-view.xml @@ -1,8 +1,7 @@ - --> - @@ -11,7 +10,7 @@ - (1, 1.5, 2, 2.5, 3, 4, 5) + (1, 1.5, 2, 2.5, 3, 4, 5) diff --git a/mxcubecore/configuration/mockup/test/sample-view-mockup.xml b/mxcubecore/configuration/mockup/test/sample-view-mockup.xml index ba8980b91e..4840137d92 100644 --- a/mxcubecore/configuration/mockup/test/sample-view-mockup.xml +++ b/mxcubecore/configuration/mockup/test/sample-view-mockup.xml @@ -2,8 +2,7 @@ - - + 5 diff --git a/mxcubecore/configuration/mockup/web/sample_view.xml b/mxcubecore/configuration/mockup/web/sample_view.xml index ee8ccfc5c2..31475f859c 100644 --- a/mxcubecore/configuration/mockup/web/sample_view.xml +++ b/mxcubecore/configuration/mockup/web/sample_view.xml @@ -5,7 +5,6 @@ - 5 diff --git a/mxcubecore/configuration/soleil_px1/Qt4_graphics-manager.xml b/mxcubecore/configuration/soleil_px1/Qt4_graphics-manager.xml index bd25e151a3..fc6bdebfc1 100644 --- a/mxcubecore/configuration/soleil_px1/Qt4_graphics-manager.xml +++ b/mxcubecore/configuration/soleil_px1/Qt4_graphics-manager.xml @@ -1,5 +1,4 @@ - diff --git a/mxcubecore/configuration/soleil_px2/beamline-setup.xml b/mxcubecore/configuration/soleil_px2/beamline-setup.xml index 7977b970d5..2b14d53fec 100644 --- a/mxcubecore/configuration/soleil_px2/beamline-setup.xml +++ b/mxcubecore/configuration/soleil_px2/beamline-setup.xml @@ -1,10 +1,9 @@ - - + @@ -15,7 +14,7 @@ - + From 
a081325030130d869fd0e67a505e01a0943e7fa8 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Thu, 30 May 2024 10:44:19 +0100 Subject: [PATCH 06/53] Branch now runs in mock mode. Added export of yml config files --- mxcubecore/HardwareRepository.py | 35 +- .../configuration/mockup/gphl/gphl-setup.yml | 182 ++--- .../mockup/gphl/gphl-workflow.yml | 641 +++++++++--------- .../mockup/qt/beamline_config.yml | 98 +-- .../configuration/mockup/xray_centring2.yml | 3 +- 5 files changed, 472 insertions(+), 487 deletions(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 6db0cb7b5b..0844289db5 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -119,20 +119,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): # Load the configuration file with open(configuration_path, "r") as fp0: configuration = yaml.load(fp0) - - # Get actual class - initialise_class = configuration.pop("_initialise_class", None) - if not initialise_class: - if _container: - msg0 = "No '_initialise_class' tag" - else: - # at top level we want to get the actual error - raise ValueError( - "%s file lacks '_initialise_class' tag" % configuration_file - ) - - if not msg0: - class_import = initialise_class.pop("class", None) + class_import = configuration.pop("class", None) if not class_import: if _container: msg0 = "No 'class' tag" @@ -159,7 +146,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if not msg0: try: # instantiate object - result = cls(name=role, **initialise_class) + result = cls(name=role) result._hwobj_container = _container except Exception: if _container: @@ -189,11 +176,12 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if not msg0: # Recursively load contained objects (of any type that the system can support) - _objects = configuration.pop("_objects", {}) + objects = configuration.pop("objects", {}) + config = configuration.pop("configuration", {}) # Set configuration with non-object properties. 
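# A sketch of the top-level YAML layout that this loader and the
# _export_draft_config_file helper now share; the class path, role name and
# property name below are hypothetical placeholders, and real examples follow
# in the gphl-setup.yml and gphl-workflow.yml diffs further down:
#
#   class: mxcubecore.HardwareObjects.SomeModule.SomeClass
#   objects:
#     some_role: some_role.yml
#   configuration:
#     some_property: 42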
- result._config = result.HOConfig(**configuration) + result._config = result.HOConfig(**config) - if _objects: + if objects: load_time = 1000 * (time.time() - start_time) msg1 = "Start loading contents:" _table.append( @@ -201,7 +189,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): ) msg0 = "Done loading contents" - for role1, config_file in _objects.items(): + for role1, config_file in objects.items(): fname, fext = os.path.splitext(config_file) if fext in (".yaml", ".yml"): @@ -252,18 +240,17 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): def _export_draft_config_file(hwobj): result = { - "_initialise_class" : { - "class": "%s.%s" % (hwobj.__class__.__module__, hwobj.__class__.__name__), - }, + "class": "%s.%s" % (hwobj.__class__.__module__, hwobj.__class__.__name__), } objects_by_role = hwobj.objects_by_role if objects_by_role: - objects = result["_objects"] = {} + objects = result["objects"] = {} for role, obj in objects_by_role.items(): objects[role] = "%s.yml" % obj.id + config = result["configuration"] ={} for tag, val in hwobj.config.model_dump().items(): if tag not in objects_by_role: - result[tag] = val + config[tag] = val fp = open(os.path.join(EXPORT_CONFIG_DIR, "%s.yml" % hwobj.id), "w") yaml.dump(result, fp) diff --git a/mxcubecore/configuration/mockup/gphl/gphl-setup.yml b/mxcubecore/configuration/mockup/gphl/gphl-setup.yml index 967bacfc29..fa7c27980d 100644 --- a/mxcubecore/configuration/mockup/gphl/gphl-setup.yml +++ b/mxcubecore/configuration/mockup/gphl/gphl-setup.yml @@ -1,94 +1,94 @@ %YAML 1.2 --- -_initialise_class: - class: mxcubecore.HardwareObjects.Gphl.GphlWorkflowConnection.GphlWorkflowConnection - -directory_locations: - # Directory locations. Can also be used with ${} syntax in this file - GPHL_INSTALLATION: /alt/rhfogh/Software/GPhL/gphl_release - # GPHL_INSTALLATION: - # /alt/rhfogh/Software/GPhL/nightly_20230823/Files_workflow_TRUNK_alpha-bdg - # GPHL_INSTALLATION: - # /alt/rhfogh/Software/GPhL/nightly_20230602/Files_workflow_BETA_beta-academic - - -# If set, run workflow through ssh with selected options -# and quote parameters as appropriate -# NB The Host option is mandatory -# ConfigFile is the ssh configuration file, passed to ssh with the -F option -# ssh_options: -# Host: lonsdale -# IdentityFile: /home/rhfogh/.ssh/wf_test_id -# User: rhfogh -# StrictHostKeyChecking: no -# ConfigFile: /path/to/ssh/config/file - -# Directory used for GPhL workflow persistence data and workflow logs: -# Used to set the wdir workflow option -# If relative designates a subdirectory under Session.get_process_directory() -# If absolute, taken as it stands -gphl_subdir: GPHL -# root name of persistence layer file; suffix'.xml. is added -gphl_persistname: persistence - -# Hosts and ports for py4j java-python gateway, all OPTIONAL -# If not set will use py4j default values, which are as given below -# NB python_host is set automatically to localhost or socket.gethostname() -# NB java_host is not needed as all communication is effectively one-way -# connection_parameters: -# python_port: 25334 -# java_port: 25333 - -# NB Non-absolute file names are interpreted relative to one of the -# HardwareRepository directories on the lookup path - -software_paths: - # Mandatory. Directory with workflow config input, e.g. 
instrumentation.nml - gphl_beamline_config: gphl_beamline_config - - # MANDATORY for CCP4 => $GPHL_CCP4_PATH/bin/ccp4.setup-sh, - # unless already sourced in environment - GPHL_CCP4_PATH: /alt/rhfogh/Software/CCP4/latest/bin/ - - # MANDATORY for XDS => $GPHL_XDS_PATH/xds_par - # unless already sourced in environment - GPHL_XDS_PATH: /alt/rhfogh/Software/XDS/latest/ - - # gphl test samples directory - for collection emulation - gphl_test_samples: gphl/test_samples - - # Location of installation for processing queue (if not reachable from BCS computer. - # Passed as environmental variable GPHL_PROC_INSTALLATION. Optional - # gphl_wf_processing_installation: /users/pkeller/GPhL-ESRF/exe - -# Software paths that are passed to global phasing workflow as java properties -# Used to specify locations of alternative executables (wrappers) -# More (unused) properties are described in the ASTRA workflow documentation -software_properties: - # Location of GPhL .license directory - # defaults to GPHL_INSTALLATION - # Only needed if you do NOT use the gphl installation wrapper scripts in .../exe : - # co.gphl.wf.bdg_licence_dir: "{GPHL_RELEASE}/gphl_release/latest" - - # MANDATORY pending change in workflow - co.gphl.wf.stratcal.bin: "{GPHL_INSTALLATION}/exe/run_stratcal_wrap.py" - - # OPTIONAL location of alternative executables (wrappers) supported by the workflow - # Each has an option (rarely used) to specify a specific license directory --> - # co.gphl.wf.process.bin: {GPHL_WRAPPERS}/scripts/aP_wf_process - # co.gphl.wf.process.bdg_licence_dir/fill/me/in - # co.gphl.wf.diffractcal.bin: {GPHL_WRAPPERS}/scripts/aP_wf_rotcal - # co.gphl.wf.diffractcal.bdg_licence_dir/fill/me/in - # co.gphl.wf.simcal_predict.bin: {GPHL_WRAPPERS}/scripts/simcal_predict - # co.gphl.wf.simcal_predict.bdg_licence_dir/fill/me/in - # co.gphl.wf.transcal.bin: {GPHL_WRAPPERS}/scripts/transcal - # co.gphl.wf.transcal.bdg_licence_dir/fill/me/in - # co.gphl.wf.recen.bin: {GPHL_WRAPPERS}/scripts/recen - # co.gphl.wf.recen.bdg_licence_dir/fill/me/in - - # OPTIONAL. simcal *binary* For Mock collection emulation only. Not used by workflow - co.gphl.wf.simcal.bin: - /alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg/autoPROC/bin/linux64/simcal - co.gphl.wf.simcal.bdg_licence_dir: - /alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg +class: mxcubecore.HardwareObjects.Gphl.GphlWorkflowConnection.GphlWorkflowConnection + +configuration: + directory_locations: + # Directory locations. Can also be used with ${} syntax in this file + GPHL_INSTALLATION: /alt/rhfogh/Software/GPhL/gphl_release + # GPHL_INSTALLATION: + # /alt/rhfogh/Software/GPhL/nightly_20230823/Files_workflow_TRUNK_alpha-bdg + # GPHL_INSTALLATION: + # /alt/rhfogh/Software/GPhL/nightly_20230602/Files_workflow_BETA_beta-academic + + + # If set, run workflow through ssh with selected options + # and quote parameters as appropriate + # NB The Host option is mandatory + # ConfigFile is the ssh configuration file, passed to ssh with the -F option + # ssh_options: + # Host: lonsdale + # IdentityFile: /home/rhfogh/.ssh/wf_test_id + # User: rhfogh + # StrictHostKeyChecking: no + # ConfigFile: /path/to/ssh/config/file + + # Directory used for GPhL workflow persistence data and workflow logs: + # Used to set the wdir workflow option + # If relative designates a subdirectory under Session.get_process_directory() + # If absolute, taken as it stands + gphl_subdir: GPHL + # root name of persistence layer file; suffix'.xml. 
is added + gphl_persistname: persistence + + # Hosts and ports for py4j java-python gateway, all OPTIONAL + # If not set will use py4j default values, which are as given below + # NB python_host is set automatically to localhost or socket.gethostname() + # NB java_host is not needed as all communication is effectively one-way + # connection_parameters: + # python_port: 25334 + # java_port: 25333 + + # NB Non-absolute file names are interpreted relative to one of the + # HardwareRepository directories on the lookup path + + software_paths: + # Mandatory. Directory with workflow config input, e.g. instrumentation.nml + gphl_beamline_config: gphl_beamline_config + + # MANDATORY for CCP4 => $GPHL_CCP4_PATH/bin/ccp4.setup-sh, + # unless already sourced in environment + GPHL_CCP4_PATH: /alt/rhfogh/Software/CCP4/latest/bin/ + + # MANDATORY for XDS => $GPHL_XDS_PATH/xds_par + # unless already sourced in environment + GPHL_XDS_PATH: /alt/rhfogh/Software/XDS/latest/ + + # gphl test samples directory - for collection emulation + gphl_test_samples: gphl/test_samples + + # Location of installation for processing queue (if not reachable from BCS computer. + # Passed as environmental variable GPHL_PROC_INSTALLATION. Optional + # gphl_wf_processing_installation: /users/pkeller/GPhL-ESRF/exe + + # Software paths that are passed to global phasing workflow as java properties + # Used to specify locations of alternative executables (wrappers) + # More (unused) properties are described in the ASTRA workflow documentation + software_properties: + # Location of GPhL .license directory + # defaults to GPHL_INSTALLATION + # Only needed if you do NOT use the gphl installation wrapper scripts in .../exe : + # co.gphl.wf.bdg_licence_dir: "{GPHL_RELEASE}/gphl_release/latest" + + # MANDATORY pending change in workflow + co.gphl.wf.stratcal.bin: "{GPHL_INSTALLATION}/exe/run_stratcal_wrap.py" + + # OPTIONAL location of alternative executables (wrappers) supported by the workflow + # Each has an option (rarely used) to specify a specific license directory --> + # co.gphl.wf.process.bin: {GPHL_WRAPPERS}/scripts/aP_wf_process + # co.gphl.wf.process.bdg_licence_dir/fill/me/in + # co.gphl.wf.diffractcal.bin: {GPHL_WRAPPERS}/scripts/aP_wf_rotcal + # co.gphl.wf.diffractcal.bdg_licence_dir/fill/me/in + # co.gphl.wf.simcal_predict.bin: {GPHL_WRAPPERS}/scripts/simcal_predict + # co.gphl.wf.simcal_predict.bdg_licence_dir/fill/me/in + # co.gphl.wf.transcal.bin: {GPHL_WRAPPERS}/scripts/transcal + # co.gphl.wf.transcal.bdg_licence_dir/fill/me/in + # co.gphl.wf.recen.bin: {GPHL_WRAPPERS}/scripts/recen + # co.gphl.wf.recen.bdg_licence_dir/fill/me/in + + # OPTIONAL. simcal *binary* For Mock collection emulation only. 
Not used by workflow + co.gphl.wf.simcal.bin: + /alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg/autoPROC/bin/linux64/simcal + co.gphl.wf.simcal.bdg_licence_dir: + /alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg diff --git a/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml b/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml index 435219b757..c1f4f5ea72 100644 --- a/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml +++ b/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml @@ -1,107 +1,106 @@ %YAML 1.2 --- -_initialise_class: - class: mxcubecore.HardwareObjects.Gphl.GphlWorkflow.GphlWorkflow +class: mxcubecore.HardwareObjects.Gphl.GphlWorkflow.GphlWorkflow -settings: - # Controls for MXCuBE UI and behaviour - # Use configured multitrigger collection, default: false, any value is true - # use_multitrigger: true - # Use advanced_mode for adidtional UI options. Default is false, any value is true - advanced_mode: true - # Maximum allowed value of Chi, for strategy calculation - # maximum_chi is automticaly reduced to match kappa motor limits. - maximum_chi: 48.0 - # default values for image width popup; may be overridden by the workflow - # The first value is the default; values are sorted before use - default_image_widths: - - 0.1 - - 0.05 - - 0.2 +configuration: + settings: + # Controls for MXCuBE UI and behaviour + # Use configured multitrigger collection, default: false, any value is true + # use_multitrigger: true + # Use advanced_mode for adidtional UI options. Default is false, any value is true + advanced_mode: true + # Maximum allowed value of Chi, for strategy calculation + # maximum_chi is automticaly reduced to match kappa motor limits. + maximum_chi: 48.0 + # default values for image width popup; may be overridden by the workflow + # The first value is the default; values are sorted before use + default_image_widths: + - 0.1 + - 0.05 + - 0.2 - # tolerance for deciding whether angles differ, in degrees; - # used to compare kappa and phi to decide if orientations needs separate centring - angular_tolerance: 1.0 - # Characterisation strategies popup. - # names refer to stategies in strategylib.nml - characterisation_strategies: - - Char_6_5_multitrigger - - Char_4_by_10 - # - Characterisation_6_5 - # - Char_24deg - # - TEST_Characterisation_12_3 - # - TEST_Characterisation_12_4 - # - TEST_Characterisation_12_5 + # tolerance for deciding whether angles differ, in degrees; + # used to compare kappa and phi to decide if orientations needs separate centring + angular_tolerance: 1.0 + # Characterisation strategies popup. + # names refer to stategies in strategylib.nml + characterisation_strategies: + - Char_6_5_multitrigger + - Char_4_by_10 + # - Characterisation_6_5 + # - Char_24deg + # - TEST_Characterisation_12_3 + # - TEST_Characterisation_12_4 + # - TEST_Characterisation_12_5 - # How to set starting value for beam energy. Values are: - # 'configured': set value from calibration and characterisation strategy file - # 'current': use current value - starting_beamline_energy: current - display_energy_decimals: 4 - default_beam_energy_tag: Main + # How to set starting value for beam energy. Values are: + # 'configured': set value from calibration and characterisation strategy file + # 'current': use current value + starting_beamline_energy: current + display_energy_decimals: 4 + default_beam_energy_tag: Main - # NB Temporary developer option. 
Defaults to 1 - allow_duplicate_orientations: 0 + # NB Temporary developer option. Defaults to 1 + allow_duplicate_orientations: 0 - # TEST ONLY developer option. Use preset SPOT.XDS file and skip characterisation. - # Only works for GPhL test samples with SPOT.XDS file - # NB to get sensible results you should use the default values for resolution and - # wavelength, and the default Eiger 16M detector - # Otherwise the UI may still work, but the indexing solutions will be weird - # use_preset_spotdir: true + # TEST ONLY developer option. Use preset SPOT.XDS file and skip characterisation. + # Only works for GPhL test samples with SPOT.XDS file + # NB to get sensible results you should use the default values for resolution and + # wavelength, and the default Eiger 16M detector + # Otherwise the UI may still work, but the indexing solutions will be weird + # use_preset_spotdir: True - defaults: - # Default values for queue_model_objects.GphlWorkflow attributes + defaults: + # Default values for queue_model_objects.GphlWorkflow attributes - # Default wedge width in degrees, for interleaved data collection - wedge_width: 30 - # Default number of snapshots to take when centring: 0, 1, 2, or 4 - snapshot_count: 2 - # # Default recentring mode. Possibilities are sweep/scan/start/none, - # # default is sweep: - # RECENTRING_MODES = OrderedDict( - # ("when orientation changes","sweep"), - # ("for each wedge","scan"), - # ("at acquisition start","start"), - # ("use predicted centrings only", "none"), - # ) - # mode "scan" is not allowed as default - recentring_mode: sweep - # Parameters to control proposed total dose budget - # The proposed budget is calculated so that radiation damage is predicted - # to reduce the intensity at the highest expected redolution to this percentage - # after the end of the experiment (i.e. value 25 means final intensity - # at the highest resolution is 25% of the starting intensity - # The formula assumes an increase in B factor of 1 A^2/MGy - # The formula used is dose_budget = - # 2 * resolution**2 * log(100. / decay_limit) / relative_sensitivity - decay_limit: 25 - # exposure time (s). NB automticaly reset to be within limits for detector - exposure_time: 0.01 - # Default use known cell parameters as forced starting point for processing - use_cell_for_processing: true - # Maximum dose budget to propose when converting from resolution, in MGy - maximum_dose_budget: 20 - # Percentage of dose budget to propose for use in characterisation. - characterisation_budget_fraction: 0.05 - # Radiation sensitivity relative to standard crystal - relative_rad_sensitivity: 1.0 - interleave_order: gs + # Default wedge width in degrees, for interleaved data collection + wedge_width: 30 + # Default number of snapshots to take when centring: 0, 1, 2, or 4 + snapshot_count: 2 + # # Default recentring mode. Possibilities are sweep/scan/start/none, + # # default is sweep: + # RECENTRING_MODES = OrderedDict( + # ("when orientation changes","sweep"), + # ("for each wedge","scan"), + # ("at acquisition start","start"), + # ("use predicted centrings only", "none"), + # ) + # mode "scan" is not allowed as default + recentring_mode: sweep + # Parameters to control proposed total dose budget + # The proposed budget is calculated so that radiation damage is predicted + # to reduce the intensity at the highest expected redolution to this percentage + # after the end of the experiment (i.e. 
value 25 means final intensity + # at the highest resolution is 25% of the starting intensity + # The formula assumes an increase in B factor of 1 A^2/MGy + # The formula used is dose_budget = + # 2 * resolution**2 * log(100. / decay_limit) / relative_sensitivity + decay_limit: 25 + # exposure time (s). NB automticaly reset to be within limits for detector + exposure_time: 0.01 + # Default use known cell parameters as forced starting point for processing + use_cell_for_processing: true + # Maximum dose budget to propose when converting from resolution, in MGy + maximum_dose_budget: 20 + # Percentage of dose budget to propose for use in characterisation. + characterisation_budget_fraction: 0.05 + # Radiation sensitivity relative to standard crystal + relative_rad_sensitivity: 1.0 + interleave_order: gs - # For testing only: - # value TEST_FROM_FILE mode reads input from environment variable GPHL_TEST_INPUT - # (which may override this value) - # Any other true value reads from auto_acq_parameters below - # automation_mode: TEST_FROM_FILE + # For testing only: + # value TEST_FROM_FILE mode reads input from environment variable GPHL_TEST_INPUT + # (which may override this value) + # Any other true value reads from auto_acq_parameters below + # automation_mode: TEST_FROM_FILE - # Default parameters for fully automated strategies - # Multiple acquisitions in order - characterisation then main - # passed to set_pre_strategy_params and set_pre_acquisition_params - # NB as long as we only acquire either characterisation+main or diffractcal - # the code will use list[0] for the first acquisition - # and list[-1] for the main one + # Default parameters for fully automated strategies + # Multiple acquisitions in order - characterisation then main + # passed to set_pre_strategy_params and set_pre_acquisition_params + # NB as long as we only acquire either characterisation+main or diffractcal + # the code will use list[0] for the first acquisition and list[-1] for the main one auto_acq_parameters: # For characterisation acquisition - exposure_time: 0.02 @@ -126,266 +125,266 @@ settings: strategy: full repetition_count: 1 # strategy_options: - # # Program options passed directly to stratcal - not needed normally - # Override default settings - # angular_tolerance, maximum_chi, and clip_kappa are defined in - # settings (above) and strategy_type and variant by program - # maximum_chi: 48.0 - # angular_tolerance: 1.0 - # option_name: 999.999 + # Program options passed directly to stratcal - not needed normally + # Override default settings + # angular_tolerance, maximum_chi, and clip_kappa are defined in + # settings (above) and strategy_type and variant by program + # maximum_chi: 48.0 + # angular_tolerance: 1.0 + # option_name: 999.999 - # Java invocation properties - syntax is e.g. '-Dfile.encoding=UTF-8' - invocation_properties: + # Java invocation properties - syntax is e.g. '-Dfile.encoding=UTF-8' + invocation_properties: - # File name prefix for workflow log files - co.gphl.wf.persistName: wf + # File name prefix for workflow log files + co.gphl.wf.persistName: wf - # Switch to pass log to stdout. Defaults to true - # co.gphl.wf.logStdout: false + # Switch to pass log to stdout. 
Defaults to True + # co.gphl.wf.logStdout: false - # Properties applied after the java command - syntax as above--> - # Note that irrelevant properties are simply ignored.--> - workflow_properties: + # Properties applied after the java command - syntax as above--> + # Note that irrelevant properties are simply ignored.--> + workflow_properties: - # If set, run after-acquisition processing in detatched mode - co.gphl.wf.process.detach: true + # If set, run after-acquisition processing in detatched mode + co.gphl.wf.process.detach: true - # Set to false to switch off pre-start checks for directory existence - # in cases where the directories are created after the workflow starts - # Checks for data directory tree - co.gphl.wf.blRootCheck: false - # Chcks for data processing directory tree - # co.gphl.wf.procRootCheck: false + # Set to false to switch off pre-start checks for directory existence + # in cases where the directories are created after the workflow starts + # Checks for data directory tree + co.gphl.wf.blRootCheck: false + # Chcks for data processing directory tree + # co.gphl.wf.procRootCheck: false - # Parameters to individual applications - # Switch devMod to true to activate development parameters - # NB needed as of 20231116 no longer in active use - # co.gphl.wf.devMode: true - # For stratcal configuration switches edit run_stratcal_wrap.py script instead + # Parameters to individual applications + # Switch devMod to True to activate development parameters + # NB needed as of 20231116 no longer in active use + # co.gphl.wf.devMode: true + # For stratcal configuration switches edit run_stratcal_wrap.py script instead - # Dummy sample properties. - # Choose values for these to make spot predictions for the characterisation - # strategy look reasonable - # (they have no basis, and are not used for any scientific purpose). - # NB predicted spots are not currently displayed or used. Leave unchanged - # NB the following should be moved to a simcal-specific file - co.gphl.wf.simcal_predict.b_wilson: 26.0 - co.gphl.wf.simcal_predict.cell_dim_sd_scale: 1.5e-3 - co.gphl.wf.simcal_predict.mosaicity: 0.2 - # # Testing ONLY: - # co.gphl.wf.run_number: 7 + # Dummy sample properties. + # Choose values for these to make spot predictions for the characterisation + # strategy look reasonable + # (they have no basis, and are not used for any scientific purpose). + # NB predicted spots are not currently displayed or used. Leave unchanged + # NB the following should be moved to a simcal-specific file + co.gphl.wf.simcal_predict.b_wilson: 26.0 + co.gphl.wf.simcal_predict.cell_dim_sd_scale: 1.5e-3 + co.gphl.wf.simcal_predict.mosaicity: 0.2 + # # Testing ONLY: + # co.gphl.wf.run_number: 7 -# Workflows, The options in the top elements are updated with the options -# in the individual type, and passed as options to teh workflow application -# The following options are set elsewhere and can *not* be set here -# appdir: Processing root directory. -# Set automatically to Session.get_process_directory() -# rootsubdir: Subdirectory under data and processing root directories. -# Set automatically to reflect MXCuBE setup -# wdir: Workflow working directory -# Set automatically to Session.get_process_directory() -# see gphl_subdir in gphl-setup.xml -# persistname: Workflow persistence file is named persistnam>.xml -# Set in gph-setup.xml -# gphl_subdir is not a workflow option. 
It is set in gphl-setup.xml -workflows: - "GΦL Acquisition Workflow": - wfpath: Gphl - wftype: acquisition - requires: - - point - options: - # NB you must set EITHER wfprefix OR samplesubdir (or both) - # wfprefix is the enactment fixed file prefix - # wfprefix: gphl_wf_ - # Include sample filename prefix as subdirectory in enactment directory path - # Any - samplesubdir: null - # # directory paths are: - # wfprefix: .../RAW_DATA/_001/... - # samplesubdir: .../RAW_DATA/_001/... - # both: .../RAW_DATA//_001/... - - strategies: - - title: Native data collection - strategy_type: native - documentation: | - Single wavelength data acquisition for native data collection - Includes initial characterisation and complete collection. - Variants: - full: 2-4 sweeps, ca. 600-1000 degrees, best possible data. - quick: 1-2 sweeps, 360-400 degrees, quick and cusp-free - minimal: 1 sweep of 180+ degrees, 1-2 centrings. - wf_selection: mxexpt - variants: - - full - - minimal - - quick - - ultralong - options: - # Filename pattern name for normal data collections - pattern: interleaved - # Filename pattern name for inverse-beam collections - ibpattern: ib_interleaved - # File name pattern for characterisation data collection - charpattern: generic - - - title: Phasing (SAD) - strategy_type: phasing - wf_selection: mxexpt - variants: - - full - - quick - documentation: | - Single wavelength SAD data acquisition, full strategy. - Aligned on even symetry axes or inverse beam. - Includes initial characterisation and complete collection - Variants: - full: up to 540deg, three orientations - quick: up to 360deg, two orientations - options: - # Filename pattern name for normal data collections - pattern: interleaved - # Filename pattern name for inverse-beam collections - ibpattern: ib_interleaved - # File name pattern for characterisation data collection - charpattern: generic - - - title: Two-wavelength MAD - strategy_type: phasing - wf_selection: mxexpt - variants: - - quick - - full - beam_energy_tags: - - Energy_1 - - Energy_2 - # Workflow interleave order (string); Slowest changing first--> - # 'g' (Goniostat position); 's' (Scan number); - # 'b' (Beam wavelength); 'd' (Detector position)--> - # NB position interleaving in NOT done for the moment. - # So the default ('gs') is a no-op - interleave_order: gsb - documentation: | - Two wavelength MAD data acquisition, full strategy. - Aligned on even symetry axes or inverse beam. - Includes initial characterisation and complete collection - Variants: - full: up to 540deg, three orientations for each wwavelngth - quick: up to 360deg, two orientations for each wwavelngth - options: - # Filename pattern name for normal data collections - pattern: interleaved - # Filename pattern name for inverse-beam collections - ibpattern: ib_interleaved - # File name pattern for characterisation data collection - charpattern: generic - - - title: Three-wavelength MAD - strategy_type: phasing - wf_selection: mxexpt - variants: - - quick - - full - beam_energy_tags: - - Energy_1 - - Energy_2 - - Energy_3 - # Workflow interleave order (string); Slowest changing first--> - # 'g' (Goniostat position); 's' (Scan number); - # 'b' (Beam wavelength); 'd' (Detector position)--> - # NB position interleaving in NOT done for the moment. - # So the default ('gs') is a no-op - interleave_order: gsb - documentation: | - Three wavelength MAD data acquisition, full strategy. - Aligned on even symetry axes or inverse beam. 
- Includes initial characterisation and complete collection - Variants: - full: up to 540deg, three orientations for each wwavelngth - quick: up to 360deg, two orientations for each wwavelngth - options: - # Filename pattern name for normal data collections - pattern: interleaved - # Filename pattern name for inverse-beam collections - ibpattern: ib_interleaved - # File name pattern for characterisation data collection - charpattern: generic - - "GΦL Diffractometer calibration": + # Workflows, The options in the top elements are updated with the options + # in the individual type, and passed as options to teh workflow application + # The following options are set elsewhere and can *not* be set here + # appdir: Processing root directory. + # Set automatically to Session.get_process_directory() + # rootsubdir: Subdirectory under data and processing root directories. + # Set automatically to reflect MXCuBE setup + # wdir: Workflow working directory + # Set automatically to Session.get_process_directory() + # see gphl_subdir in gphl-setup.xml + # persistname: Workflow persistence file is named persistnam>.xml + # Set in gph-setup.xml + # gphl_subdir is not a workflow option. It is set in gphl-setup.xml + workflows: + "GΦL Acquisition Workflow": wfpath: Gphl - wftype: diffractcal + wftype: acquisition requires: - point options: # NB you must set EITHER wfprefix OR samplesubdir (or both) # wfprefix is the enactment fixed file prefix # wfprefix: gphl_wf_ - # Include sample filename prefix as subdirextory in enactment directory path + # Include sample filename prefix as subdirectory in enactment directory path # Any samplesubdir: null - # # directory paths are: + # # directory paths are: # wfprefix: .../RAW_DATA/_001/... # samplesubdir: .../RAW_DATA/_001/... # both: .../RAW_DATA//_001/... strategies: - - title: Diffractometer calibration - strategy_type: diffractcal - wf_selection: diffractcal + - title: Native data collection + strategy_type: native + documentation: | + Single wavelength data acquisition for native data collection + Includes initial characterisation and complete collection. + Variants: + full: 2-4 sweeps, ca. 600-1000 degrees, best possible data. + quick: 1-2 sweeps, 360-400 degrees, quick and cusp-free + minimal: 1 sweep of 180+ degrees, 1-2 centrings. + wf_selection: mxexpt + variants: + - full + - minimal + - quick + - ultralong + options: + # Filename pattern name for normal data collections + pattern: interleaved + # Filename pattern name for inverse-beam collections + ibpattern: ib_interleaved + # File name pattern for characterisation data collection + charpattern: generic + + - title: Phasing (SAD) + strategy_type: phasing + wf_selection: mxexpt variants: - full - - short + - quick documentation: | - Diffractometer calibration. - Designed for use by beamline personnel. - Calibrates axis directions, detector pane orientation, - and beam centre. - Long data collection and processing, requiring a high-quality, - high-symmetry crystal of precisely known cell parameters. + Single wavelength SAD data acquisition, full strategy. + Aligned on even symetry axes or inverse beam. + Includes initial characterisation and complete collection Variants: - full: 22 60-deg sweeps, 11 orientations - short: 6 60-deg sweeps, 3 orientations + full: up to 540deg, three orientations + quick: up to 360deg, two orientations options: - # wfprefix: Dcalib2_ - # Name of strategy from library to use. 
Variant name is appended - strategy: DiffractCal_ - # File name for diffractometer calibration output - calibration: diffractcal - # Pattern name for characterisation collections, used for all data - charpattern: multiorientation - # copy calibration result to configuration directory, - # moving aside previous value - # updateblconfig: null - # # Path of file to dump persisted instrumentation configuration. OPTIONAL - # instcfgout: gphl_diffractcal_out.nml + # Filename pattern name for normal data collections + pattern: interleaved + # Filename pattern name for inverse-beam collections + ibpattern: ib_interleaved + # File name pattern for characterisation data collection + charpattern: generic - "GΦL Translational Calibration": - wfpath: Gphl - wftype: transcal - requires: - - samplegrid - strategies: - - title: Translational Calibration - strategy_type: transcal - wf_selection: transcal + - title: Two-wavelength MAD + strategy_type: phasing + wf_selection: mxexpt + variants: + - quick + - full + beam_energy_tags: + - Energy_1 + - Energy_2 + # Workflow interleave order (string); Slowest changing first--> + # 'g' (Goniostat position); 's' (Scan number); + # 'b' (Beam wavelength); 'd' (Detector position)--> + # NB position interleaving in NOT done for the moment. + # So the default ('gs') is a no-op + interleave_order: gsb + documentation: | + Two wavelength MAD data acquisition, full strategy. + Aligned on even symetry axes or inverse beam. + Includes initial characterisation and complete collection + Variants: + full: up to 540deg, three orientations for each wwavelngth + quick: up to 360deg, two orientations for each wwavelngth + options: + # Filename pattern name for normal data collections + pattern: interleaved + # Filename pattern name for inverse-beam collections + ibpattern: ib_interleaved + # File name pattern for characterisation data collection + charpattern: generic + + - title: Three-wavelength MAD + strategy_type: phasing + wf_selection: mxexpt variants: + - quick - full + beam_energy_tags: + - Energy_1 + - Energy_2 + - Energy_3 + # Workflow interleave order (string); Slowest changing first--> + # 'g' (Goniostat position); 's' (Scan number); + # 'b' (Beam wavelength); 'd' (Detector position)--> + # NB position interleaving in NOT done for the moment. + # So the default ('gs') is a no-op + interleave_order: gsb documentation: | - Translational calibration. - Designed for use by beamline personnel. - Calibrates centring motors to allow prediction of centring positions - Consists of mulltiple centrings, requiring a glass or tungsten tip + Three wavelength MAD data acquisition, full strategy. + Aligned on even symetry axes or inverse beam. + Includes initial characterisation and complete collection + Variants: + full: up to 540deg, three orientations for each wwavelngth + quick: up to 360deg, two orientations for each wwavelngth options: - # file: File containing settings of rotation axes for calibration - # The name is the relative path from the gphl_beamline_config directory - # grid: Grid definition [axis_name:start:end:step], - # slowest-varying to fastest varying - # EITHER file OR grid must be set.--> - # grid: grid_axes_spec - file: transcal_2stage.json - # copy calibration result to configuration directory, - # renaming previous result file - updateblconfig: null - # Path of file to dump persisted instrumentation configuration. 
OPTIONAL - # instcfgout: gphl_transcal_out.nml + # Filename pattern name for normal data collections + pattern: interleaved + # Filename pattern name for inverse-beam collections + ibpattern: ib_interleaved + # File name pattern for characterisation data collection + charpattern: generic + + "GΦL Diffractometer calibration": + wfpath: Gphl + wftype: diffractcal + requires: + - point + options: + # NB you must set EITHER wfprefix OR samplesubdir (or both) + # wfprefix is the enactment fixed file prefix + # wfprefix: gphl_wf_ + # Include sample filename prefix as subdirextory in enactment directory path + # Any + samplesubdir: null + # # directory paths are: + # wfprefix: .../RAW_DATA/_001/... + # samplesubdir: .../RAW_DATA/_001/... + # both: .../RAW_DATA//_001/... + + strategies: + - title: Diffractometer calibration + strategy_type: diffractcal + wf_selection: diffractcal + variants: + - full + - short + documentation: | + Diffractometer calibration. + Designed for use by beamline personnel. + Calibrates axis directions, detector pane orientation, + and beam centre. + Long data collection and processing, requiring a high-quality, + high-symmetry crystal of precisely known cell parameters. + Variants: + full: 22 60-deg sweeps, 11 orientations + short: 6 60-deg sweeps, 3 orientations + options: + # wfprefix: Dcalib2_ + # Name of strategy from library to use. Variant name is appended + strategy: DiffractCal_ + # File name for diffractometer calibration output + calibration: diffractcal + # Pattern name for characterisation collections, used for all data + charpattern: multiorientation + # copy calibration result to configuration directory, + # moving aside previous value + # updateblconfig: null + # # Path of file to dump persisted instrumentation configuration. OPTIONAL + # instcfgout: gphl_diffractcal_out.nml + + "GΦL Translational Calibration": + wfpath: Gphl + wftype: transcal + requires: + - samplegrid + strategies: + - title: Translational Calibration + strategy_type: transcal + wf_selection: transcal + variants: + - full + documentation: | + Translational calibration. + Designed for use by beamline personnel. + Calibrates centring motors to allow prediction of centring positions + Consists of mulltiple centrings, requiring a glass or tungsten tip + options: + # file: File containing settings of rotation axes for calibration + # The name is the relative path from the gphl_beamline_config directory + # grid: Grid definition [axis_name:start:end:step], + # slowest-varying to fastest varying + # EITHER file OR grid must be set.--> + # grid: grid_axes_spec + file: transcal_2stage.json + # copy calibration result to configuration directory, + # renaming previous result file + updateblconfig: null + # Path of file to dump persisted instrumentation configuration. 
OPTIONAL + # instcfgout: gphl_transcal_out.nml diff --git a/mxcubecore/configuration/mockup/qt/beamline_config.yml b/mxcubecore/configuration/mockup/qt/beamline_config.yml index c5b51c9244..cef09b5c76 100644 --- a/mxcubecore/configuration/mockup/qt/beamline_config.yml +++ b/mxcubecore/configuration/mockup/qt/beamline_config.yml @@ -2,10 +2,9 @@ --- # The class to initialise, and init parameters -_initialise_class: - class: mxcubecore.HardwareObjects.Beamline.Beamline - # Further key-value pairs here will be passed to the class init - # mode: devel +class: mxcubecore.HardwareObjects.Beamline.Beamline +# Further key-value pairs here will be passed to the class init +# mode: devel # objects # @@ -15,7 +14,7 @@ _initialise_class: # NBNB some objects that do not currently have their own config files # would need those added (e.g. the centring methods) # -_objects: +objects: # The !!o0map and the lines starting with '- ' give you an *ordered* dictionary # And thus a reproducible loading order !!omap @@ -60,36 +59,37 @@ _objects: - characterisation: characterisation-mockup.xml # - beam_realign: # Skipped - optional -# Non-object attributes: -advanced_methods: - - MeshScan - - XrayCentering +configuration: + # Non-object attributes: + advanced_methods: + - MeshScan + - XrayCentering -tunable_wavelength: true -disable_num_passes: false -run_online_processing: false -run_number: 1 + tunable_wavelength: true + disable_num_passes: false + run_online_processing: false + run_number: 1 -default_acquisition_parameters: - default: - # Default values, also used for standard acquisition. - # Values not given in other dictionaries are taken from here - exp_time: 0.04 # (s) exposure time - osc_start: 0.0 # (degrees) Only used if no current angle found - osc_range: 0.1 # (degrees) - num_passes: 1 # (int) - first_image: 1 # (int) - overlap: 0 - num_images: 1 # (int) - # detector_binning_mode: 1 # Removed as not in practice used. - inverse_beam: false # (bool) - take_dark_current: true # (bool) - skip_existing_images: true # (bool) - take_snapshots: true # (bool) + default_acquisition_parameters: + default: + # Default values, also used for standard acquisition. + # Values not given in other dictionaries are taken from here + exp_time: 0.04 # (s) exposure time + osc_start: 0.0 # (degrees) Only used if no current angle found + osc_range: 0.1 # (degrees) + num_passes: 1 # (int) + first_image: 1 # (int) + overlap: 0 + num_images: 1 # (int) + # detector_binning_mode: 1 # Removed as not in practice used. + inverse_beam: false # (bool) + take_dark_current: true # (bool) + skip_existing_images: true # (bool) + take_snapshots: true # (bool) - helical: - # Defaults for helical scan. Missing values are taken from default - num_images: 100 + helical: + # Defaults for helical scan. Missing values are taken from default + num_images: 100 characterisation: # Defaults for chareacterisation. Missing values are taken from default @@ -102,24 +102,24 @@ default_acquisition_parameters: max_crystal_vphi: 1.0 min_crystal_vphi: 1.0 - advanced: - # Defaults for 'advanced' acquisition. Missing values are taken from default - osc_range: 0.5 - num_images: 100 + advanced: + # Defaults for 'advanced' acquisition. 
Missing values are taken from default + osc_range: 0.5 + num_images: 100 -acquisition_limit_values: - exposure_time: # (s) - - 0.04 - - 6000.0 - osc_range: # (degrees) - - -1000.0 - - 10000.0 - number_of_images: # (int) - - 1 - - 99999 - kappa: # (degrees) - - 0.0 - - 180.0 + acquisition_limit_values: + exposure_time: # (s) + - 0.04 + - 6000.0 + osc_range: # (degrees) + - -1000.0 + - 10000.0 + number_of_images: # (int) + - 1 + - 99999 + kappa: # (degrees) + - 0.0 + - 180.0 # kappa_phi: # (degrees) # - 0.0 # - 360.0 diff --git a/mxcubecore/configuration/mockup/xray_centring2.yml b/mxcubecore/configuration/mockup/xray_centring2.yml index 994ee0f74a..d596e54160 100644 --- a/mxcubecore/configuration/mockup/xray_centring2.yml +++ b/mxcubecore/configuration/mockup/xray_centring2.yml @@ -1,5 +1,4 @@ %YAML 1.2 --- -_initialise_class: - class: mxcubecore.HardwareObjects.mockup.XrayCentringMockup.XrayCentringMockup +class: mxcubecore.HardwareObjects.mockup.XrayCentringMockup.XrayCentringMockup From 0d7e3fed619718539a6c30569ae75124db222f41 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Thu, 30 May 2024 12:24:31 +0100 Subject: [PATCH 07/53] Bug fix - and added warning for unrecognised role --- mxcubecore/HardwareRepository.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 0844289db5..2c93718611 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -56,6 +56,8 @@ string_types, ) +from warnings import warn + if TYPE_CHECKING: from mxcubecore.BaseHardwareObjects import HardwareObject @@ -215,7 +217,11 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): ) if not msg0: if _container: - setattr(_container._config, role, result) + if not hasattr(_container, role): + warn( + f"load_from_yaml Class {_container.__class__.__name__} has no attribute {role}" + ) + setattr(_container, role, result) try: # Initialise object result.init() From 198792fbfb59bae54811b21e5f05c4ed49108be0 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Thu, 30 May 2024 14:13:46 +0100 Subject: [PATCH 08/53] Bug fix - and added warning for unrecognised role --- mxcubecore/BaseHardwareObjects.py | 49 ++++++++----------- mxcubecore/HardwareRepository.py | 5 +- .../mockup/gphl/gphl-workflow.yml | 3 +- 3 files changed, 26 insertions(+), 31 deletions(-) diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index 183e536534..e6c63dca8f 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -21,6 +21,7 @@ from __future__ import absolute_import import ast +import copy import enum import logging import typing @@ -99,7 +100,7 @@ def __init__(self, **kwargs): self.__dict__.update(kwargs) def model_dump(self): - return self.__dict__.copy() + return copy.deepcopy(self.__dict__) def __init__( self, name: str, hwobj_container: Optional["ConfiguredObject"] = None @@ -112,6 +113,7 @@ def __init__( self._name = name self._config: Optional["ConfiguredObject.HOConfig"] = None self._hwobj_container: Optional[ConfiguredObject] = hwobj_container + self._roles = [] def __getattr__(self, attr): return getattr(self.__dict__["_config"], attr) @@ -149,7 +151,7 @@ def hwobj_container(self): def get_by_id(self, _id: str) -> "ConfiguredObject": result = self for name in _id.split("."): - result = getattr(result._config, name) + result = getattr(result, name) if result is None: break # @@ -164,31 +166,27 @@ def objects_by_role(self) -> Dict[str, Union[Self, 
None]]: Returns: OrderedDict[str, Union[Self, None]]: Contained objects mapped by role. """ - if self._config is not None: - return dict( - tpl - for tpl in self._config.model_dump().items() - if isinstance(tpl[1], ConfiguredObject) - ) - elif isinstance(self, HardwareObjectNode): - # NBNB TEMPORARY for transition to yaml configuration only - return self._objects_by_role.copy() + result = {} + for tag in self._roles: + if hasattr(self, tag): + result[tag] = getattr(self, tag) + else: + raise ValueError( + "%s object has no attribute %s" % (self.__class__.__name__, tag) + ) + # + return result def get_properties(self) -> Dict[str, Any]: """Get configured properties (not roles)""" if self._config is not None: - return dict( - tpl - for tpl in self._config.model_dump().items() - if not isinstance(tpl[1], ConfiguredObject) and tpl[1] is not None - ) - + return self._config.model_dump() elif isinstance(self, HardwareObjectNode): # NBNB TEMPORARY for transition to yaml configuration only return HardwareObjectNode.get_properties(self) def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: - """Get property value or contained HardwareObject. + """Get property value . Args: name (str): Name @@ -198,11 +196,10 @@ def get_property(self, name: str, default_value: Optional[Any] = None) -> Any: Any: Property value. """ if self._config is not None: - return ( - getattr(self._config, name) - if hasattr(self._config, name) - else default_value - ) + if hasattr(self._config, name): + return getattr(self._config, name) + else: + return default_value elif isinstance(self, HardwareObjectNode): # NBNB TEMPORARY for transition to yaml configuration only return HardwareObjectNode.get_property(self, name, default_value) @@ -213,11 +210,7 @@ def get_roles(self) -> List[str]: Returns: List[str]: List of hardware object roles. """ - warnings.warn( - "%s.get_roles is deprecated. 
Avoid, or use objects_by_role instead" - % self.__class__.__name__ - ) - return list(self.objects_by_role.keys()) + return list(self._roles) def print_log( self, diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 2c93718611..43eb2df162 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -221,12 +221,12 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): warn( f"load_from_yaml Class {_container.__class__.__name__} has no attribute {role}" ) + _container._roles.append(role) setattr(_container, role, result) try: # Initialise object result.init() except Exception: - raise if _container: msg0 = "Error in %s.init()" % cls.__name__ else: @@ -267,8 +267,9 @@ def _attach_xml_objects(container, hwobj, role): hwobj._hwobj_container = container hwobj._name = role + container._roles.append(role) hwobj._config = hwobj.HOConfig(**hwobj.get_properties()) - setattr(container._config, role, hwobj) + setattr(container, role, hwobj) objects_by_role = hwobj._objects_by_role for role2, hwobj2 in objects_by_role.items(): _attach_xml_objects(hwobj, hwobj2, role2) diff --git a/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml b/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml index c1f4f5ea72..594543b1df 100644 --- a/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml +++ b/mxcubecore/configuration/mockup/gphl/gphl-workflow.yml @@ -100,7 +100,8 @@ configuration: # Multiple acquisitions in order - characterisation then main # passed to set_pre_strategy_params and set_pre_acquisition_params # NB as long as we only acquire either characterisation+main or diffractcal - # the code will use list[0] for the first acquisition and list[-1] for the main one + # the code will use list[0] for the first acquisition and + # list[-1] for the main one auto_acq_parameters: # For characterisation acquisition - exposure_time: 0.02 From b4ab9b02063b95a9e54cbcd567dbd6f8b1dcd9d0 Mon Sep 17 00:00:00 2001 From: Marcus Oskarsson Date: Thu, 30 May 2024 14:27:32 +0200 Subject: [PATCH 09/53] Updated access to elements --- mxcubecore/HardwareObjects/mockup/EnergyScanMockup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mxcubecore/HardwareObjects/mockup/EnergyScanMockup.py b/mxcubecore/HardwareObjects/mockup/EnergyScanMockup.py index 477c3080ac..fd8a3bf589 100644 --- a/mxcubecore/HardwareObjects/mockup/EnergyScanMockup.py +++ b/mxcubecore/HardwareObjects/mockup/EnergyScanMockup.py @@ -389,9 +389,10 @@ def do_chooch(self, elt, edge, scan_directory, archive_directory, prefix): def get_elements(self): elements = [] + try: - for el in self["elements"]: - elements.append({"symbol": el.symbol, "energy": el.energy}) + for el in self.config.elements["element"]: + elements.append({"symbol": el["symbol"], "energy": el["energy"]}) except IndexError: pass return elements From 2570e23a5c3b41061773251e174ea6e30bb9d7ee Mon Sep 17 00:00:00 2001 From: Marcus Oskarsson Date: Thu, 30 May 2024 14:27:50 +0200 Subject: [PATCH 10/53] Added harvester to beamline object --- mxcubecore/HardwareObjects/Beamline.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index f5f69d7786..475e648d35 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -123,6 +123,8 @@ class HOConfig(ConfiguredObject.HOConfig): resolution = None sample_changer = None sample_changer_maintenance = None + harvester = None + 
harvester_maintenance = None plate_manipulator = None session = None lims = None From 4e0a888f7de21ac46f8f49fe4da33299cec3bb50 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 1 Jul 2024 11:25:12 +0200 Subject: [PATCH 11/53] yaml: update edna workflow to new API The method self.name() is now a property self.name. Also, use the f-string for generatng log messages, for a more modern look and feel of the code. --- mxcubecore/HardwareObjects/EdnaWorkflow.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mxcubecore/HardwareObjects/EdnaWorkflow.py b/mxcubecore/HardwareObjects/EdnaWorkflow.py index ea2291d6da..56cf2fbb97 100644 --- a/mxcubecore/HardwareObjects/EdnaWorkflow.py +++ b/mxcubecore/HardwareObjects/EdnaWorkflow.py @@ -107,9 +107,7 @@ def set_command_failed(self, *args): def state_changed(self, new_value): new_value = str(new_value) - logging.getLogger("HWR").debug( - "%s: state changed to %r", str(self.name()), new_value - ) + logging.getLogger("HWR").debug(f"{self.name}: state changed to {new_value}") self.emit("stateChanged", (new_value,)) def workflow_end(self): From 1bc736d4a5d70d856047a1dc3ad80db0b30ff55c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 1 Jul 2024 11:28:49 +0200 Subject: [PATCH 12/53] yaml: include YAML loaded HWOs into 'hardware_objects' dictionary Behave in the same way as when loading XML configured HWO. The 'hardware_objects' dictionary is for example used by mxcubeweb adapters layer. If the HWO is not listed there, the adaptor for it will not load. --- mxcubecore/HardwareRepository.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 43eb2df162..74e417eaa2 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -195,9 +195,10 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): fname, fext = os.path.splitext(config_file) if fext in (".yaml", ".yml"): - load_from_yaml( + hwobj = load_from_yaml( config_file, role=role1, _container=result, _table=_table ) + _instance.hardware_objects[f"/{hwobj.load_name}"] = hwobj elif fext == ".xml": msg1 = "" time0 = time.time() From 79bae0fe65d709c405547735ecc963de1069795e Mon Sep 17 00:00:00 2001 From: Marcus Oskarsson Date: Thu, 27 Jun 2024 08:58:04 +0200 Subject: [PATCH 13/53] adds dynamic 'export yaml config' feature Adds an optional argument 'yaml_export_directory' to HardwareRepository.init_hardware_repository() function. When argument is provided, the HWO YAML config files will be written to the specified directory. The aim of this feature is assist in migrating beamline XML configuration to YAML style. 
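As an illustration, a minimal sketch of how the exporter could be invoked
(the configuration path and output directory below are placeholders, not
part of this change):

    from pathlib import Path

    from mxcubecore import HardwareRepository as HWR

    # Load the beamline as usual; in addition, write a draft YAML
    # configuration file for every hardware object that gets loaded.
    HWR.init_hardware_repository(
        "mxcubecore/configuration/mockup/web",
        yaml_export_directory=Path("/tmp/yaml_drafts"),
    )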
--- mxcubecore/HardwareRepository.py | 80 +++++++++++++++++++++----------- 1 file changed, 53 insertions(+), 27 deletions(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 74e417eaa2..4ee8b94b14 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -39,10 +39,13 @@ import traceback import weakref from datetime import datetime +from pathlib import Path from typing import ( TYPE_CHECKING, + Optional, Union, ) +from warnings import warn from ruamel.yaml import YAML @@ -56,8 +59,6 @@ string_types, ) -from warnings import warn - if TYPE_CHECKING: from mxcubecore.BaseHardwareObjects import HardwareObject @@ -80,13 +81,13 @@ BEAMLINE_CONFIG_FILE = "beamline_config.yml" -# Temporary hack to export yaml config file verions after loading -# Set to an existing directory to trigger output of yaml config files -# EXPORT_CONFIG_DIR = "/home/rhfogh/pycharm/mock_config_dirs_tmp" -EXPORT_CONFIG_DIR = None - - -def load_from_yaml(configuration_file, role, _container=None, _table=None): +def load_from_yaml( + configuration_file, + role, + yaml_export_directory: Optional[Path] = None, + _container=None, + _table=None, +): """ Args: @@ -179,7 +180,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if not msg0: # Recursively load contained objects (of any type that the system can support) objects = configuration.pop("objects", {}) - config = configuration.pop("configuration", {}) + config = configuration.pop("configuration", {}) # Set configuration with non-object properties. result._config = result.HOConfig(**config) @@ -196,7 +197,11 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): fname, fext = os.path.splitext(config_file) if fext in (".yaml", ".yml"): hwobj = load_from_yaml( - config_file, role=role1, _container=result, _table=_table + config_file, + role=role1, + yaml_export_directory=yaml_export_directory, + _container=result, + _table=_table, ) _instance.hardware_objects[f"/{hwobj.load_name}"] = hwobj elif fext == ".xml": @@ -209,7 +214,7 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): msg1 = "No object loaded" else: class_name1 = hwobj.__class__.__name__ - _attach_xml_objects(result, hwobj, role1) + _attach_xml_objects(yaml_export_directory, result, hwobj, role1) except Exception as ex: msg1 = "Loading error (%s)" % str(ex) load_time = 1000 * (time.time() - time0) @@ -239,13 +244,18 @@ def load_from_yaml(configuration_file, role, _container=None, _table=None): if _container is None: print(make_table(column_names, _table)) - elif EXPORT_CONFIG_DIR: - # temporary hack - _export_draft_config_file(result) - # + elif yaml_export_directory: + if result: + _export_draft_config_file(yaml_export_directory, result) + return result -def _export_draft_config_file(hwobj): + +def _export_draft_config_file(dest_dir: Path, hwobj): + def write_yaml(data, file_name: str): + dest_dir.mkdir(parents=True, exist_ok=True) + yaml.dump(data, Path(dest_dir, file_name)) + result = { "class": "%s.%s" % (hwobj.__class__.__module__, hwobj.__class__.__name__), } @@ -253,15 +263,20 @@ def _export_draft_config_file(hwobj): if objects_by_role: objects = result["objects"] = {} for role, obj in objects_by_role.items(): - objects[role] = "%s.yml" % obj.id - config = result["configuration"] ={} + try: + objects[role] = "%s.yml" % obj.id + except: + logging.getLogger("HWR").exception("") + + config = result["configuration"] = {} for tag, val in hwobj.config.model_dump().items(): 
if tag not in objects_by_role: config[tag] = val - fp = open(os.path.join(EXPORT_CONFIG_DIR, "%s.yml" % hwobj.id), "w") - yaml.dump(result, fp) -def _attach_xml_objects(container, hwobj, role): + write_yaml(result, "%s.yml" % hwobj.id) + + +def _attach_xml_objects(yaml_export_directory: Optional[Path], container, hwobj, role): """Recursively attach XML-configured object to container as role NBNB guard against duplicate objects""" @@ -269,11 +284,12 @@ def _attach_xml_objects(container, hwobj, role): hwobj._hwobj_container = container hwobj._name = role container._roles.append(role) + hwobj._config = hwobj.HOConfig(**hwobj.get_properties()) setattr(container, role, hwobj) objects_by_role = hwobj._objects_by_role for role2, hwobj2 in objects_by_role.items(): - _attach_xml_objects(hwobj, hwobj2, role2) + _attach_xml_objects(yaml_export_directory, hwobj, hwobj2, role2) for tag in hwobj._objects_names(): if tag not in objects_by_role: # Complex object, not contained hwobj @@ -283,9 +299,10 @@ def _attach_xml_objects(container, hwobj, role): else: setattr(hwobj.config, tag, objs) # - if EXPORT_CONFIG_DIR: + if yaml_export_directory: # temporary hack - _export_draft_config_file(hwobj) + if hwobj: + _export_draft_config_file(yaml_export_directory, hwobj) def _convert_xml_property(hwobj): @@ -325,12 +342,17 @@ def set_user_file_directory(user_file_directory): BaseHardwareObjects.HardwareObjectNode.set_user_file_directory(user_file_directory) -def init_hardware_repository(configuration_path): +def init_hardware_repository( + configuration_path: str, + yaml_export_directory: Optional[Path] = None, +): """Initialise hardware repository - must be run at program start Args: configuration_path (str): PATHSEP-separated string of directories giving configuration file lookup path + yaml_export_directory: if specified, loaded hardware objects configuration + will be written to this directory, as YAML files Returns: @@ -360,7 +382,11 @@ def init_hardware_repository(configuration_path): logging.getLogger("HWR").info("Hardware repository: %s", configuration_path) _instance = __HardwareRepositoryClient(configuration_path) _instance.connect() - beamline = load_from_yaml(BEAMLINE_CONFIG_FILE, role="beamline") + beamline = load_from_yaml( + BEAMLINE_CONFIG_FILE, + role="beamline", + yaml_export_directory=yaml_export_directory, + ) def uninit_hardware_repository(): From 9ddb96782d1a7605877db53d5ed861688388669f Mon Sep 17 00:00:00 2001 From: rhfogh Date: Fri, 5 Jul 2024 14:45:30 +0100 Subject: [PATCH 14/53] Bug fix: Added '#' in front of ''%YAML 1.2' --- mxcubecore/configuration/lnls_manaca/beamline_config.yml | 2 +- mxcubecore/configuration/lnls_sol/beamline_config.yml | 2 +- .../configuration/mockup/gphl/gphl_wf_test_parameters.yml | 2 +- mxcubecore/configuration/mockup/qt/beamline_config.yml | 2 +- mxcubecore/configuration/mockup/web/beamline_config.yml | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mxcubecore/configuration/lnls_manaca/beamline_config.yml b/mxcubecore/configuration/lnls_manaca/beamline_config.yml index 34be52f048..7e853e9e0b 100644 --- a/mxcubecore/configuration/lnls_manaca/beamline_config.yml +++ b/mxcubecore/configuration/lnls_manaca/beamline_config.yml @@ -1,4 +1,4 @@ -%YAML 1.2 +# %YAML 1.2 --- # The class to initialise, and init parameters diff --git a/mxcubecore/configuration/lnls_sol/beamline_config.yml b/mxcubecore/configuration/lnls_sol/beamline_config.yml index 86c7649389..2df5dbc190 100644 --- a/mxcubecore/configuration/lnls_sol/beamline_config.yml +++ 
b/mxcubecore/configuration/lnls_sol/beamline_config.yml @@ -1,4 +1,4 @@ -%YAML 1.2 +# %YAML 1.2 --- # The class to initialise, and init parameters diff --git a/mxcubecore/configuration/mockup/gphl/gphl_wf_test_parameters.yml b/mxcubecore/configuration/mockup/gphl/gphl_wf_test_parameters.yml index c3006e8a6d..34ddac0ada 100644 --- a/mxcubecore/configuration/mockup/gphl/gphl_wf_test_parameters.yml +++ b/mxcubecore/configuration/mockup/gphl/gphl_wf_test_parameters.yml @@ -1,4 +1,4 @@ -%YAML 1.2 +# %YAML 1.2 --- ### Calling interface (for automation) diff --git a/mxcubecore/configuration/mockup/qt/beamline_config.yml b/mxcubecore/configuration/mockup/qt/beamline_config.yml index cef09b5c76..28118e2cbf 100644 --- a/mxcubecore/configuration/mockup/qt/beamline_config.yml +++ b/mxcubecore/configuration/mockup/qt/beamline_config.yml @@ -1,4 +1,4 @@ -%YAML 1.2 +# %YAML 1.2 --- # The class to initialise, and init parameters diff --git a/mxcubecore/configuration/mockup/web/beamline_config.yml b/mxcubecore/configuration/mockup/web/beamline_config.yml index f41aa654b2..61187d1166 100644 --- a/mxcubecore/configuration/mockup/web/beamline_config.yml +++ b/mxcubecore/configuration/mockup/web/beamline_config.yml @@ -1,4 +1,4 @@ -%YAML 1.2 +# %YAML 1.2 --- # The class to initialise, and init parameters From cdd4404440458cdd7ab5c821d2880e178969c059 Mon Sep 17 00:00:00 2001 From: rhfogh Date: Fri, 5 Jul 2024 14:54:24 +0100 Subject: [PATCH 15/53] Changed YAML default indentation to yaml.indent(mapping=2, sequence=4, offset=2) --- mxcubecore/model/queue_model_objects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mxcubecore/model/queue_model_objects.py b/mxcubecore/model/queue_model_objects.py index 4b4c981aa2..c8a9dd72f5 100644 --- a/mxcubecore/model/queue_model_objects.py +++ b/mxcubecore/model/queue_model_objects.py @@ -39,7 +39,7 @@ yaml = YAML(typ="safe", pure=True) # The following are not needed for load, but define the default style. yaml.default_flow_style = False - yaml.indent(mapping=4, sequence=4, offset=2) + yaml.indent(mapping=2, sequence=4, offset=2) except Exception: logging.getLogger("HWR").warning( "Cannot import dependenices needed for GPHL workflows - GPhL workflows might not work" From b13a606346de737be837f0d2657a927a9849040e Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 2 Aug 2024 15:02:46 +0200 Subject: [PATCH 16/53] reshuffle code for populating _config attribute for XML configs When loading HWOBJ with XML config file, populate its _config attribute _before_ calling init() method. This way, the self.config attribute can be used inside init() method for both YAML and XML configured hardware objects. Before this change, self.config was only available for YAML configured objects. For XML configured objects, the self.config was 'None'. 
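For example, after this change an init() method like the sketch below
(the 'tolerance' property name is only illustrative) works unchanged
whether the object was configured from XML or from YAML:

    from mxcubecore.BaseHardwareObjects import HardwareObject

    class ExampleHardwareObject(HardwareObject):
        def init(self):
            # self.config is populated before init() is called,
            # for XML-configured as well as YAML-configured objects
            tolerance = self.config.tolerance
            # equivalent lookup, with a fallback value
            tolerance = self.get_property("tolerance", 0.1)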
--- mxcubecore/HardwareRepository.py | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 4ee8b94b14..02dff36b0a 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -285,20 +285,11 @@ def _attach_xml_objects(yaml_export_directory: Optional[Path], container, hwobj, hwobj._name = role container._roles.append(role) - hwobj._config = hwobj.HOConfig(**hwobj.get_properties()) setattr(container, role, hwobj) objects_by_role = hwobj._objects_by_role for role2, hwobj2 in objects_by_role.items(): _attach_xml_objects(yaml_export_directory, hwobj, hwobj2, role2) - for tag in hwobj._objects_names(): - if tag not in objects_by_role: - # Complex object, not contained hwobj - objs = list(_convert_xml_property(obj) for obj in hwobj._get_objects(tag)) - if len(objs) == 1: - setattr(hwobj.config, tag, objs[0]) - else: - setattr(hwobj.config, tag, objs) - # + if yaml_export_directory: # temporary hack if hwobj: @@ -317,6 +308,26 @@ def _convert_xml_property(hwobj): return result +def _create_config_for_xml_hwobj(hwobj: BaseHardwareObjects.HardwareObjectNode): + """ + Populate hwobj._config attribute for an HWOBJ loaded with XML configure file. + + This allows to access HWOBJ configuration uniformly for both YAML and XML + configured objects, using its 'config' attribute. + """ + hwobj._config = hwobj.HOConfig(**hwobj.get_properties()) + + objects_by_role = hwobj._objects_by_role + for tag in hwobj._objects_names(): + if tag not in objects_by_role: + # Complex object, not contained hwobj + objs = list(_convert_xml_property(obj) for obj in hwobj._get_objects(tag)) + if len(objs) == 1: + setattr(hwobj.config, tag, objs[0]) + else: + setattr(hwobj.config, tag, objs) + + def add_hardware_objects_dirs(ho_dirs): """Adds directories with xml/yaml config files @@ -550,6 +561,7 @@ def hardwareObjectDeleted(name=hwobj_instance.name): comment = "Failed to add all commands and/or channels" try: + _create_config_for_xml_hwobj(hwobj_instance) hwobj_instance._init() hwobj_instance.init() class_name = str(hwobj_instance.__module__) From caba802ae5b027810aaa1b78bfd0e0ca34939786 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 12 Aug 2024 16:42:54 +0200 Subject: [PATCH 17/53] fix issue with skipped calls to init() for YAML HWOBJs Only set 'msg0' variable when loading the 'beamline_config.yml' file. Otherwise the call to HWOBJ.init() will be skipped on line 225, if it have any sub-HWOBJs configured. --- mxcubecore/HardwareRepository.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 02dff36b0a..328b04d8c2 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -184,7 +184,7 @@ def load_from_yaml( # Set configuration with non-object properties. result._config = result.HOConfig(**config) - if objects: + if _container is None: load_time = 1000 * (time.time() - start_time) msg1 = "Start loading contents:" _table.append( From 8bbbd5d3ac54a34d346182b29e37a0df97f23c6c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 2 Aug 2024 15:14:05 +0200 Subject: [PATCH 18/53] update Session HWOBJ to work with both YAML and XML configs Update Session HWOBJ to access its configuration using 'self.config' attribute and get_property() method. Drops unused 'template' and 'suffix' attributes. 
Converts 'synchrotron_name', 'beamline_name' and 'endstation_name' to be proxy attributes for fields in 'self.config', this way there no need copy values. Replace self["foo"] calls, they are not supported when YAML config file is used. --- mxcubecore/HardwareObjects/Session.py | 85 ++++++++++++--------------- 1 file changed, 38 insertions(+), 47 deletions(-) diff --git a/mxcubecore/HardwareObjects/Session.py b/mxcubecore/HardwareObjects/Session.py index b9d6676ac1..69535f20a4 100644 --- a/mxcubecore/HardwareObjects/Session.py +++ b/mxcubecore/HardwareObjects/Session.py @@ -22,23 +22,15 @@ class Session(HardwareObject): def __init__(self, name): HardwareObject.__init__(self, name) - self.synchrotron_name = None - self.beamline_name = None - self.session_id = None self.proposal_code = None self.proposal_number = None self.proposal_id = None self.in_house_users = [] - self.endstation_name = None self.session_start_date = None self.user_group = "" self.email_extension = None - self.template = None - self.default_precision = 5 - self.suffix = None - self.base_directory = None self.base_process_directory = None self.base_archive_directory = None @@ -46,47 +38,51 @@ def __init__(self, name): self.raw_data_folder_name = default_raw_data_folder self.processed_data_folder_name = default_processed_data_folder - # Framework-2 method, inherited from HardwareObject and called - # by the framework after the object has been initialized. - def init(self): - self.synchrotron_name = self.get_property("synchrotron_name") - self.beamline_name = self.get_property("beamline_name") - self.endstation_name = self.get_property("endstation_name").lower() + @property + def synchrotron_name(self) -> str: + return self.config.synchrotron_name - self.suffix = self["file_info"].get_property("file_suffix") - self.template = self["file_info"].get_property("file_template") + @property + def beamline_name(self) -> str: + return self.config.beamline_name - base_directory = self["file_info"].get_property("base_directory") + @property + def endstation_name(self) -> str: + return self.config.endstation_name - base_process_directory = self["file_info"].get_property( - "processed_data_base_directory" - ) + def init(self): + def get_inhouse_proposals(): + """ + get the optional 'inhouse_users' config property - base_archive_directory = self["file_info"].get_property( - "archive_base_directory" - ) + return the property, or an empty list if the property is not specified + """ + inhouse_users = self.get_property("inhouse_users") + if inhouse_users: + return inhouse_users.get("proposal", []) + + # property not specified + return [] - folder_name = self["file_info"].get_property("raw_data_folder_name") + file_info = self.config.file_info + + folder_name = file_info.get("raw_data_folder_name") if folder_name and folder_name.strip(): self.raw_data_folder_name = folder_name - folder_name = self["file_info"].get_property("processed_data_folder_name") + folder_name = file_info.get("processed_data_folder_name") if folder_name and folder_name.strip(): self.processed_data_folder_name = folder_name - archive_folder = self["file_info"].get_property("archive_folder") + archive_folder = file_info.get("archive_folder") if archive_folder: archive_folder = archive_folder.strip() if not archive_folder: archive_folder = default_archive_folder - try: - inhouse_proposals = self["inhouse_users"]["proposal"] - for prop in inhouse_proposals: - self.in_house_users.append( - (prop.get_property("code"), str(prop.get_property("number"))) - ) - except KeyError: - 
pass + + self.in_house_users = [ + (prop["code"], str(prop["number"])) for prop in get_inhouse_proposals() + ] email_extension = self.get_property("email_extension") if email_extension: @@ -99,21 +95,23 @@ def init(self): pass self.set_base_data_directories( - base_directory, - base_process_directory, - base_archive_directory, + file_info["base_directory"], + file_info["processed_data_base_directory"], + file_info["archive_base_directory"], raw_folder=self.raw_data_folder_name, process_folder=self.processed_data_folder_name, archive_folder=archive_folder, ) try: - precision = int(self["file_info"].get_property("precision", "")) + precision = int(file_info.get("precision", "")) except ValueError: precision = self.default_precision PathTemplate.set_precision(precision) - PathTemplate.set_path_template_style(self.synchrotron_name, self.template) + PathTemplate.set_path_template_style( + self.synchrotron_name, file_info.get("file_template") + ) def set_base_data_directories( self, @@ -322,14 +320,7 @@ def get_default_subdir(self, sample_data: dict) -> str: return subdir.replace(":", "-") def get_archive_directory(self): - archive_directory = os.path.join( - self["file_info"].get_property("archive_base_directory"), - self["file_info"].get_property("archive_folder"), - ) - - archive_directory = PathTemplate.get_archive_directory() - - return archive_directory + return PathTemplate.get_archive_directory() def get_proposal(self): """ From c6b17ff14cb729af4efd6f6b04c4174d3efe7359 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Tue, 20 Aug 2024 12:12:38 +0200 Subject: [PATCH 19/53] docs: update and expand 'Configuration files' section Update and exapand documentation of YAML configuration files. - remove outdated information - update names of changed identifiers - add more details and some examples Co-authored-by: fabcor --- docs/source/dev/configuration_files.md | 83 +++++++++++++++++++------- 1 file changed, 63 insertions(+), 20 deletions(-) diff --git a/docs/source/dev/configuration_files.md b/docs/source/dev/configuration_files.md index 677b17c496..5d68763e7e 100644 --- a/docs/source/dev/configuration_files.md +++ b/docs/source/dev/configuration_files.md @@ -37,41 +37,84 @@ Each YAML-configured object has a `name` attribute, which is equal to the role that identifies the object within the containing object (the name of the Beamline object is `beamline`). -YAML-configured objects must be subclasses of the `BaseHardwareObjects.ConfiguredObject` class. -HardwareObjects proper (which excludes e.g. `Beamline` and procedures) -are subclasses of `BaseHardwareObjects.HadwareObjectYaml`. -The most complete example is the Beamline object and the comments in `beamline_config.yml` -are the best guide to the syntax of YAML configuration files. +The most complete example of a YAML-configured object is the `Beamline` object. +The comments in `beamline_config.yml` are the best guide to the syntax of YAML configuration files. It is a key principle of YAML-configured classes that **all** attributes added in the configuration must match a pre-defined attribute coded in the class. This means that you can look in the class code to see which attributes are available. -The only exception is the `_initialise_class` attribute at the start of the file. -This dictionary contains the import name of the class that is to be created, -and optionally parameters to be passed to the `init()` method of that class. 
-The `_objects` attribute in the file gives the HardwareObjects that are contained in + +The only exception is the `class` attribute at the start of the YAML configuration file. +This attribute specifies the import name of the class that is to be created. + +The `objects` dictionary in the file gives the HardwareObjects that are contained in (i.e. children of) the object. The dictionary key is the role name, and the value is the name of the configuration file. -Each `role_name` must match a read-only property coded in the body of the class, -and must be added to the `__content_roles` list of the class by the class code. +Each `role_name` must match a read-only property coded in the body of the class. Note that classes are loaded and initialised in the order given by this list, so that there is a reproducible loading order. Contained objects can be defined as procedures, so that they are added to the list of procedures. Each YAML-configured class has an `_init()` method that is executed immediately after the object is created, and an `init()` function that is executed after configured parameters and contained objects have been loaded. -### Accessing configuration data +Below is an example YAML configuration file: + +```yaml +class: ISPyBClientMockup.ISPyBClientMockup +configuration: + base_result_url: https://your.limsresults.org + login_type: proposal +objects: + lims_rest: lims_rest.yaml + session: session.yaml +``` + +This file specifies a hardware object, which is an instance of the `ISPyBClientMockup` class. +That object will have two configuration properties `base_result_url` and `login_type`. +Two child objects with roles `lims_rest` and `session` will be loaded from the specified configuration files. + +### Accessing configuration properties + +The contents of the `configuration` section will be available as a `config` attribute of the hardware object. +It is also possible to access the configuration with the `get_property()` and `get_properties()` methods. +Below is an example of how configuration can be read in the `init()` method of a hardware object. + +```python +def init(self): + # access 'file_info' config property via 'config' attribute + file_info = self.config.file_info + + # access 'file_info' config property via 'get_property()' method + file_info = self.get_property("file_info") + + # get all of the object's config properties + all_props = self.get_properties() +``` + +Note that you should only access an object's configuration properties from its implementation class. +The values of an object's configuration properties are considered implementation details of that hardware object. +If access is required by outside code, then it should be provided by the object's client API. + +### Accessing child objects + +The `objects` dictionary in the YAML configuration file specifies the child objects. +These child objects can be accessed via the parent object's `role_name` attribute. +For example, if a hardware object is configured with this configuration file: + +```yaml +class: Foo.Foo +objects: + bar: gazonk.yaml +``` + +An instance of the `Foo` class will be created. +This instance will have a child object with `bar` role, with configuration from `gazonk.yaml` file. +That child object will be accessible with `foo.bar` python expression, where `foo` is the parent object. + The Beamline object (`HardwareRepository.beamline`) is a YAML-configured object, and is the starting point for finding other hardware objects. These may in turn contain other objects, so you can do e.g. 
`HardwareRepository.beamline.detector.distance` to get the detector distance motor object. -Configured properties are similarly accessed as simple attributes, e,g, `beamline.default_acquisition_parameters`. -Each `ConfiguredObject` has three special properties and one function to deal with the objects contained within it. -These are: - -- `all_roles`: a list of the roles (attribute names) of contained HardwareObjects, in loading order; -- `all_objects_by_role`: an ordered dictionary of contained HardwareObjects; -- `procedures` an ordered dictionary of HardwareObjects for procedures; -- `replace_object()`: a method to replace an existing configured object at runtime with a new object. + ## XML-configured objects ### Code and file structure From 845963e71870122b05d1dcd92199e0e141fcff6c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 12 Aug 2024 17:05:12 +0200 Subject: [PATCH 20/53] correct deprecation warning fix suggestion When warning that get_object_by_role() is deprecated, suggest using an attribute instead. Using get_property() to fetch objects does not work. --- mxcubecore/BaseHardwareObjects.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index e6c63dca8f..986c0a55ee 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -525,11 +525,11 @@ def get_object_by_role(self, role: str) -> Union["HardwareObject", None]: Returns: Union[HardwareObject, None]: Hardware object. """ + role = str(role).lower() warnings.warn( - "%s.get_object_by_role is deprecated. Use get_property instead" - % self.__class__.__name__ + f"{self.__class__.__name__}.get_object_by_role is deprecated. " + f"Use attribute '{role}' to access this object." ) - role = str(role).lower() objects = [self] for curr in objects: From f6bea5d62063a7b6964cbb936d463613d8dbd88c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 5 Jul 2024 14:35:10 +0200 Subject: [PATCH 21/53] docs: XML to YAML config migration document Adds a documentation section 'YAML configuration migration', with details on how to migrate to YAML configration. Co-authored-by: fabcor --- docs/source/dev/yaml_conf_migration.md | 188 +++++++++++++++++++++++++ 1 file changed, 188 insertions(+) create mode 100644 docs/source/dev/yaml_conf_migration.md diff --git a/docs/source/dev/yaml_conf_migration.md b/docs/source/dev/yaml_conf_migration.md new file mode 100644 index 0000000000..8587c9eae8 --- /dev/null +++ b/docs/source/dev/yaml_conf_migration.md @@ -0,0 +1,188 @@ +# YAML configuration migration + +Historically, MXCuBE used XML files for configuring hardware objects. +Now it's possible to use YAML files instead of XML files. +Currently, it is possible to use both XML and YAML files within the same beamline configuration. +You can use XML for some hardware objects and YAML for others. +This allows for a gradual migration to YAML configuration. + +When using a YAML file, you may need to update the Python code of your hardware object. +Some of the methods for accessing hardware object configuration are not supported when using YAML. + +This document provides some guidance on needed modification when migrating from XML to YAML. + +## `beamline_config.yml` format changes + +The format of the entry point configuration file `beamline_config.yml` have changed. 
+The old format had the following style: + +```yaml +_initialise_class: + class: mxcubecore.HardwareObjects.Beamline.Beamline +_objects: + !!omap + - session: session.xml + - data_publisher: data_publisher.xml + - machine_info: machine_info.xml + +# Non-object attributes: +advanced_methods: + - MeshScan + - XrayCentering +tunable_wavelength: true +``` + +This format is not supported anymore. +The new format, corresponding to the example above, looks like this: + +```yaml +class: mxcubecore.HardwareObjects.Beamline.Beamline +objects: + !!omap + - session: session.xml + - data_publisher: data_publisher.xml + - machine_info: machine_info.xml +configuration: + advanced_methods: + - MeshScan + - XrayCentering + tunable_wavelength: true +``` + +The changes to the format are outlined below. + +### `_initialise_class` + +The `_initialise_class` dictionary has been replaced by a `class` key-value pair. + + +### `_objects` + +The `_objects` dictionary has been renamed to `objects`. +Otherwise, the format of the dictionary is the same as before. + +### New `configuration` dictionary + +All the general configuration parameters of the beamline have moved inside the new `configuration` dictionary. + +## Converting XML files to YAML + +Note that MXCuBE-Web provides some support for automatically converting XML configuration files to YAML. +See the [YAML configuration migration](https://mxcubeweb.readthedocs.io/en/latest/dev/yaml_conf_migration.html) +section in MXCuBE-Web documentation for details. +For details on the format of the YAML configurations file, see [Yaml-configured objects](configuration_files.md#yaml-configured-objects). + +This section provides an example of equivalent configuration in XML and YAML formats. +Given the following XML configuration file: + +```xml + + + prop_val + + uz + ve + + + + + + +``` + +Gives the following equivalent configuration in YAML: + +```yaml +class: Shanxi.Shanxi +configuration: + simple_prop: prop_val + nested: + child_a: uz + child_b: ve +objects: + session: session.yaml + lims: lims.yaml +``` + +The hardware object class specified by `` becomes the `class: Shanxi.Shanxi` key-value. +In the YAML format, the fully qualified class name must be specified. + +All the configuration property XML tags becomes entries in the YAML's `configuration` dictionary. + +Each `` tags becomes an entry in the YAML's `objects` dictionary. +The tag's `role` attribute is used as the entry's key name. +The tag's `href` attribute is converted to a config file name and specified as the entry's value. + +## `self["prop_name"]` expressions not supported + +Using index expressions to access configuration properties is no longer supported. +Use object's `config` attribute or `get_property()` methods. + +For example, following old-style code: + +```python +def init(self): + foo = self["foo"] +``` + +Needs to be converted to one of the following styles: + +```python +def init(self): + # use 'config' attribute to access configuration property + foo = self.config.foo +``` + +```python +def init(self): + # use 'get_property()' method to access configuration property + foo = self.get_property("foo") +``` + +## `@property` annotation for child objects + +Using `@property` annotated attribute to provide access to a child object is not supported. 
+ +Consider this old-style code: + +```python +class Shanxi(HardwareObject): + def init(self): + self._session = self.get_object_by_role("session") + + @property + def session(self): + return self._session +``` + +This code provides access to its child hardware object `session` via the annotated `session` attribute. +This style does not work anymore. +Remove the annotated `session` attribute and assignment to `self._session` attribute. +Use the following configuration file. + +```yaml +class: Shanxi.Shanxi +objects: + session: session.yml +``` + +During the initialization of the `Shanxi` object, +the child object will be automatically assigned to the `session` attribute. +See [Accessing child objects](configuration_files.md#accessing-child-objects) for more details. + +## `name` parameter in `__init__` method + +When a hardware object is loaded using a YAML configuration file, it is created with the following code: + +```python +ClassName(name="role") +``` + +Thus, the hardware object's `__init__` method must accept the `name` parameter. +Below is an example that works with YAML configuration file: + +```python +class Shanxi(HardwareObject): + def __init__(self, name): + super().__init__(name) +``` From ad2c3ec429dfc1c020a39d1d4bec8861214a32b6 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 12 Aug 2024 18:18:18 +0200 Subject: [PATCH 22/53] 'emulate' get_object_by_role() access for YAML HWOBJs Make it possible to use get_object_by_role() on HWOBJs loaded with YAML config files, at least in some situations. This hack will make it easier to migrate to using YAML config files, as not all code invoking get_object_by_role() needs to be updated stright away. --- mxcubecore/BaseHardwareObjects.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/mxcubecore/BaseHardwareObjects.py b/mxcubecore/BaseHardwareObjects.py index 986c0a55ee..ed3597481e 100644 --- a/mxcubecore/BaseHardwareObjects.py +++ b/mxcubecore/BaseHardwareObjects.py @@ -530,6 +530,26 @@ def get_object_by_role(self, role: str) -> Union["HardwareObject", None]: f"{self.__class__.__name__}.get_object_by_role is deprecated. " f"Use attribute '{role}' to access this object." ) + + # + # A hack to emulate get_object_by_role() for objects loaded from YAML config files. + # + # When HWOBJ is loaded from YAML, we don't populate it's '_objects_by_role' dictionary, + # thus that normal code path to look-up and object by role does not work. + # + # However, objects are attached to the parent object via attribute assignment. Try accessing + # using that attribute. + # + try: + obj = getattr(self, role, None) + if obj is not None: + return obj + except AttributeError: + pass + + # + # Look-up object by role the old way. + # objects = [self] for curr in objects: From fcf12317b03f19c90b486812271d6e3eb6eab163 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 15 Aug 2024 15:45:01 +0200 Subject: [PATCH 23/53] handle missing YAML configure files Fix and issue where MXCuBE would not start if some of the specified YAML files could not be found. With this change, the behavior is the same as with missing XML files. An error message for the HWOBJ is added to the summary table, and the loading process continues. 
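
For illustration only, a minimal sketch of what this means on the caller
side, assuming the repository has been initialised (the "transmission"
role used here is hypothetical):

```python
from mxcubecore import HardwareRepository as HWR

repo = HWR.get_hardware_repository()

# an object whose YAML file was missing is never registered, so it is simply
# absent from the repository; the error ends up in the start-up summary table
# and loading of the remaining objects continues
transmission = repo.hardware_objects.get("/transmission")
if transmission is None:
    print("transmission hardware object not available, continuing without it")
```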
--- mxcubecore/HardwareRepository.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index 328b04d8c2..ab4e8c6410 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -203,7 +203,9 @@ def load_from_yaml( _container=result, _table=_table, ) - _instance.hardware_objects[f"/{hwobj.load_name}"] = hwobj + if hwobj: + # only add if we successfully loaded the object + _instance.hardware_objects[f"/{hwobj.load_name}"] = hwobj elif fext == ".xml": msg1 = "" time0 = time.time() From f1e0cffdac4e8596b180b0ac257792574b787619 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Tue, 27 Aug 2024 13:19:20 +0200 Subject: [PATCH 24/53] prevent re-loading same YAML config file twice When loading hardware objects from YAML configuration file, check if that file have been loaded earlier. Raise an error if we detect that same file is being reloaded. The aim is to enforce strict tree-like structure of hardware objects. This to avoid confusion of the situations when same hardware object is attached at different points. --- mxcubecore/HardwareRepository.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index ab4e8c6410..c7a694aa9b 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -196,6 +196,14 @@ def load_from_yaml( fname, fext = os.path.splitext(config_file) if fext in (".yaml", ".yml"): + fname = f"/{fname}" + + # check if we already loaded this configuration file + if _instance.hardware_objects.get(fname) is not None: + raise Exception( + f"Configuration file '{config_file}', referenced in '{configuration_file}, " + f"has been loaded earlier. Refusing to load it a second time." + ) hwobj = load_from_yaml( config_file, role=role1, @@ -205,7 +213,8 @@ def load_from_yaml( ) if hwobj: # only add if we successfully loaded the object - _instance.hardware_objects[f"/{hwobj.load_name}"] = hwobj + _instance.hardware_objects[fname] = hwobj + elif fext == ".xml": msg1 = "" time0 = time.time() From 3166090e6918826031ff1ee7936e056f23853775 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 19 Aug 2024 16:00:42 +0200 Subject: [PATCH 25/53] make HWR.beamline.acquisition_limit_values work again Add a 'acquisition_limit_values' proxy attribute to the Beamline HWOBJ. This way the limits can be accessed with: HWR.beamline.acquisition_limit_values This is how MXCuBE-web reads the limits. Let's support this style for a while, for backward compability reason. 
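
For illustration, assuming an initialised beamline object, the two access
styles below are intended to be equivalent:

```python
from mxcubecore import HardwareRepository as HWR

beamline = HWR.beamline

# backward-compatible style used by MXCuBE-web
limits = beamline.acquisition_limit_values

# equivalent direct read of the configured defaults
defaults = beamline.config.default_acquisition_parameters
assert limits == defaults["acquisition_limit_values"]
```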
--- mxcubecore/HardwareObjects/Beamline.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/mxcubecore/HardwareObjects/Beamline.py b/mxcubecore/HardwareObjects/Beamline.py index 475e648d35..039650ee2c 100644 --- a/mxcubecore/HardwareObjects/Beamline.py +++ b/mxcubecore/HardwareObjects/Beamline.py @@ -88,9 +88,6 @@ class HOConfig(ConfiguredObject.HOConfig): # Dictionary-of-dictionaries of default acquisition parameters default_acquisition_parameters = {} - # Dictionary of acquisition parameter limits - acquisition_limit_values = {} - # int Starting run number for path_template run_number = 1 @@ -164,6 +161,15 @@ def _hwr_init_done(self): """ self._hardware_object_id_dict = self._get_id_dict() + @property + def acquisition_limit_values(self): + """ + adds a proxy attribute, so that the default acquisition limits can be accessed with: + + HWR.beamline.acquisition_limit_values + """ + return self.config.default_acquisition_parameters["acquisition_limit_values"] + def get_id(self, ho: HardwareObject) -> str: warn("Beamline.get_id is Deprecated. Use hwobj.id instead") return ho.id From 03594117ea7d339fde7d86324276e40166419402 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 12 Aug 2024 18:22:47 +0200 Subject: [PATCH 26/53] port AbstractDetector to use get_property() Update AbstractDetector class to access it's 'beam' setting with get_property(). Replaces self["beam"] expression, as it does not work when YAML config file is used. --- mxcubecore/HardwareObjects/abstract/AbstractDetector.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/mxcubecore/HardwareObjects/abstract/AbstractDetector.py b/mxcubecore/HardwareObjects/abstract/AbstractDetector.py index fe87069355..4a8cb52a8e 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractDetector.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractDetector.py @@ -84,13 +84,10 @@ def __init__(self, name): self._metadata = {} def init(self): - """Initialise some common paramerters""" + """Initialise some common parameters""" super().init() - try: - self._metadata = dict(self["beam"].get_properties()) - except KeyError: - pass + self._metadata = self.get_property("beam", {}) self._distance_motor_hwobj = self.get_object_by_role("detector_distance") From 221f12a9668f269fb8e7e899f381e04660f1bf7b Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Tue, 13 Aug 2024 11:54:36 +0200 Subject: [PATCH 27/53] port beam releated HWOBJS to work with YAML Change code implementing Beam and Slits HWOBJs to work both with YAML and XML configure files. Add 'name' named attribute to __init__(), as it's required when loading from YAML. Renamed '_aperture', '_definer' and '_slits' attributes to 'aperture', 'definer' and 'slits', as this is the new style for attaching child HWOBJs. Added a backward compability hack, so that child HWOBJs attributes get populated when loading from XML configure file. 
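
A short usage sketch of the renamed attributes (a configured 'beam' object
is assumed, the values are examples only):

```python
from mxcubecore import HardwareRepository as HWR

beam = HWR.beamline.beam

# child hardware objects are now plain public attributes, filled in either by
# the YAML loader or by the XML fallback in init()
if beam.slits is not None:
    beam.slits.set_horizontal_gap(0.05)    # example value, mm
if beam.aperture is not None:
    beam.aperture.set_diameter_size(50)    # example value
```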
--- .../HardwareObjects/abstract/AbstractBeam.py | 33 ++++-------------- .../HardwareObjects/abstract/AbstractSlits.py | 4 +-- .../HardwareObjects/mockup/BeamMockup.py | 34 +++++++++++++------ .../HardwareObjects/mockup/SlitsMockup.py | 4 +-- 4 files changed, 34 insertions(+), 41 deletions(-) diff --git a/mxcubecore/HardwareObjects/abstract/AbstractBeam.py b/mxcubecore/HardwareObjects/abstract/AbstractBeam.py index 5d7c0ed2c2..2091974873 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractBeam.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractBeam.py @@ -58,9 +58,9 @@ class AbstractBeam(HardwareObject): def __init__(self, name): super().__init__(name) - self._aperture = None - self._slits = None - self._definer = None + self.aperture = None + self.slits = None + self.definer = None self._definer_type = None self._beam_size_dict = { @@ -93,27 +93,6 @@ def init(self): self._beam_position_on_screen = [0, 0] self._definer_type = self.get_property("definer_type") - @property - def aperture(self): - """ - Returns aperture hwobj - """ - return self._aperture - - @property - def slits(self): - """ - Returns slits hwobj - """ - return self._slits - - @property - def definer(self): - """ - Beam definer device, equipment like focusing optics, CRLs, and etc. - """ - return self._definer - def get_beam_divergence(self): """Get the beam divergence. Returns: @@ -184,10 +163,10 @@ def set_beam_size_shape(self, beam_width, beam_height, beam_shape): ) if beam_shape == BeamShape.RECTANGULAR: - self._slits.set_horizontal_gap(beam_width) - self._slits.set_vertical_gap(beam_height) + self.slits.set_horizontal_gap(beam_width) + self.slits.set_vertical_gap(beam_height) elif beam_shape == BeamShape.ELLIPTICAL: - self._aperture.set_diameter_size(beam_width) + self.aperture.set_diameter_size(beam_width) def get_beam_position_on_screen(self): """Get the beam position diff --git a/mxcubecore/HardwareObjects/abstract/AbstractSlits.py b/mxcubecore/HardwareObjects/abstract/AbstractSlits.py index 52eed56aa6..6fa9513039 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractSlits.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractSlits.py @@ -31,12 +31,12 @@ class AbstractSlits(HardwareObject, object): __metaclass__ = abc.ABCMeta - def __init__(self, *args): + def __init__(self, name: str): warn( "AbstractSlits is deprecated. 
Use specific motors instead", DeprecationWarning, ) - HardwareObject.__init__(self, *args) + HardwareObject.__init__(self, name) self._value = [None, None] self._min_limits = [None, None] diff --git a/mxcubecore/HardwareObjects/mockup/BeamMockup.py b/mxcubecore/HardwareObjects/mockup/BeamMockup.py index f3c8b8a015..6bd9bf1dd9 100644 --- a/mxcubecore/HardwareObjects/mockup/BeamMockup.py +++ b/mxcubecore/HardwareObjects/mockup/BeamMockup.py @@ -53,22 +53,36 @@ def init(self): """Initialize hardware""" super().init() - self._aperture = self.get_object_by_role("aperture") - if self._aperture: + # + # backward compatibility hack to support loading from XML config file + # + # when loading from YAML configuration file, + # the attributes will be automatically set to the specified child HWOBJs + # + # when loading from XML, it does not happen, so fall back to get_object_by_role() + # + if self.aperture is None: + self.aperture = self.get_object_by_role("aperture") + + if self.slits is None: + self.slits = self.get_object_by_role("slits") + + if self.definer is None: + self.definer = self.get_object_by_role("definer") + + if self.aperture: _definer_type = "aperture" - self._aperture.connect("valueChanged", self.aperture_diameter_changed) + self.aperture.connect("valueChanged", self.aperture_diameter_changed) - self._slits = self.get_object_by_role("slits") - if self._slits: + if self.slits: _definer_type = "slits" - self._slits.connect("valueChanged", self.slits_gap_changed) + self.slits.connect("valueChanged", self.slits_gap_changed) - self._definer = self.get_object_by_role("definer") - if self._definer: + if self.definer: _definer_type = "definer" - self._definer.connect("valueChanged", self._re_emit_values) + self.definer.connect("valueChanged", self._re_emit_values) - self._definer_type = self.get_property("definer_type") or _definer_type + self._definer_type = self.get_property("definer_type", _definer_type) self._beam_position_on_screen = literal_eval( self.get_property("beam_position", "[318, 238]") diff --git a/mxcubecore/HardwareObjects/mockup/SlitsMockup.py b/mxcubecore/HardwareObjects/mockup/SlitsMockup.py index b050fa10f0..7c24d912f5 100644 --- a/mxcubecore/HardwareObjects/mockup/SlitsMockup.py +++ b/mxcubecore/HardwareObjects/mockup/SlitsMockup.py @@ -24,8 +24,8 @@ class SlitsMockup(AbstractSlits): - def __init__(self, *args): - AbstractSlits.__init__(self, *args) + def __init__(self, name: str): + AbstractSlits.__init__(self, name) def init(self): self._value = [0.05, 0.05] From 4f2c6841484e79d5cb2028f1910d0156df675b8c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Wed, 14 Aug 2024 13:55:06 +0200 Subject: [PATCH 28/53] make some diffractometer related HWOBJs work with YAML Drop proxy attributes 'kappa' and 'kappa_phi' from GenericDiffractometer class. These interfere with attaching of sub-HWOBJs for diffractometer HWOBJ. Add named parameter 'name' to DiffractometerMockup.__init__(). This parameter is required when loading HWOBJ from YAML configuration file. 
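
Sketch of the constructor call made by the YAML loader and of the new child
'slot' attributes (the role name is illustrative):

```python
from mxcubecore.HardwareObjects.mockup.DiffractometerMockup import (
    DiffractometerMockup,
)

# the loader instantiates hardware objects as ClassName(name="<role>")
diffractometer = DiffractometerMockup(name="diffractometer")

# kappa and kappa_phi are no longer read-only proxy properties; they are plain
# attributes that stay None until a configuration file attaches the motors
print(diffractometer.kappa)       # None before a configuration is loaded
print(diffractometer.kappa_phi)   # None before a configuration is loaded
```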
--- .../HardwareObjects/GenericDiffractometer.py | 18 ------------------ .../mockup/DiffractometerMockup.py | 19 +++++++++++++++++-- 2 files changed, 17 insertions(+), 20 deletions(-) diff --git a/mxcubecore/HardwareObjects/GenericDiffractometer.py b/mxcubecore/HardwareObjects/GenericDiffractometer.py index 4b88d1891b..4e5f970052 100755 --- a/mxcubecore/HardwareObjects/GenericDiffractometer.py +++ b/mxcubecore/HardwareObjects/GenericDiffractometer.py @@ -545,24 +545,6 @@ def omega(self): """ return self.motor_hwobj_dict.get("phi") - @property - def kappa(self): - """kappa motor object - - Returns: - AbstractActuator - """ - return self.motor_hwobj_dict.get("kappa") - - @property - def kappa_phi(self): - """kappa_phi motor object - - Returns: - AbstractActuator - """ - return self.motor_hwobj_dict.get("kappa_phi") - @property def centring_x(self): """centring_x motor object diff --git a/mxcubecore/HardwareObjects/mockup/DiffractometerMockup.py b/mxcubecore/HardwareObjects/mockup/DiffractometerMockup.py index 30e0e15849..e53a0d2cda 100644 --- a/mxcubecore/HardwareObjects/mockup/DiffractometerMockup.py +++ b/mxcubecore/HardwareObjects/mockup/DiffractometerMockup.py @@ -36,11 +36,26 @@ class DiffractometerMockup(GenericDiffractometer): Descript. : """ - def __init__(self, *args): + def __init__(self, name): """ Descript. : """ - GenericDiffractometer.__init__(self, *args) + GenericDiffractometer.__init__(self, name) + + # child object slots + self.backlight = None + self.backlightswitch = None + self.beamstop_distance = None + self.focus = None + self.frontlight = None + self.frontlightswitch = None + self.kappa = None + self.kappa_phi = None + self.phi = None + self.phiy = None + self.phiz = None + self.sampx = None + self.sampy = None def init(self): """ From 0021b479df72e3a7929ef95553e80bc3cf5af839 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Wed, 14 Aug 2024 15:32:24 +0200 Subject: [PATCH 29/53] make some sample changer related HWOBJs work with YAML Make it possible to create SampleChangerMockup and AbstractSampleChanger derived HWOBJs with ClassName(name=foo) expressions. This is the expression used when loading from a YAML configuration file. 
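
For illustration, this is (schematically) the call the YAML loader makes,
with the role name taken from the configuration file:

```python
from mxcubecore.HardwareObjects.mockup.SampleChangerMockup import (
    SampleChangerMockup,
)

# ClassName(name="<role>") is the expression used by the loader; the mockup
# forwards the single 'name' argument to the SampleChanger base class
sample_changer = SampleChangerMockup(name="sample_changer")
print(sample_changer.name)   # "sample_changer"
```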
--- .../HardwareObjects/abstract/AbstractSampleChanger.py | 6 ++---- mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/mxcubecore/HardwareObjects/abstract/AbstractSampleChanger.py b/mxcubecore/HardwareObjects/abstract/AbstractSampleChanger.py index 7a7a66bf64..7d4d783780 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractSampleChanger.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractSampleChanger.py @@ -212,11 +212,9 @@ class SampleChanger(Container, HardwareObject): TASK_FINISHED_EVENT = "taskFinished" CONTENTS_UPDATED_EVENT = "contentsUpdated" - def __init__(self, type_, scannable, *args, **kwargs): + def __init__(self, type_, scannable, name): super().__init__(type_, None, type_, scannable) - if len(args) == 0: - args = (type_,) - HardwareObject.__init__(self, *args, **kwargs) + HardwareObject.__init__(self, name) self.state = -1 self.status = "" self._set_state(SampleChangerState.Unknown) diff --git a/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py b/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py index d25f151fef..375100e67b 100644 --- a/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py +++ b/mxcubecore/HardwareObjects/mockup/SampleChangerMockup.py @@ -11,8 +11,8 @@ class SampleChangerMockup(AbstractSampleChanger.SampleChanger): NO_OF_BASKETS = 5 NO_OF_SAMPLES_IN_BASKET = 10 - def __init__(self, *args, **kwargs): - super(SampleChangerMockup, self).__init__(self.__TYPE__, False, *args, **kwargs) + def __init__(self, name): + super(SampleChangerMockup, self).__init__(self.__TYPE__, False, name) def init(self): self._selected_sample = -1 From 43c5f993018509ae5c03d51e88e9e878af9e4f7d Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 15 Aug 2024 09:55:03 +0200 Subject: [PATCH 30/53] remove proxy 'camera' attribute on SampleView HWOBJs The 'camera' attribute is now automagically set from the 'objects' settings in YAML/XML configure file. --- mxcubecore/HardwareObjects/SampleView.py | 1 - .../HardwareObjects/abstract/AbstractSampleView.py | 10 +--------- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/mxcubecore/HardwareObjects/SampleView.py b/mxcubecore/HardwareObjects/SampleView.py index b6202dd1fc..ba7edb2c0c 100644 --- a/mxcubecore/HardwareObjects/SampleView.py +++ b/mxcubecore/HardwareObjects/SampleView.py @@ -64,7 +64,6 @@ def __init__(self, name): def init(self): super(SampleView, self).init() - self._camera = self.get_object_by_role("camera") self._last_oav_image = None self.hide_grid_threshold = self.get_property("hide_grid_threshold", 5) diff --git a/mxcubecore/HardwareObjects/abstract/AbstractSampleView.py b/mxcubecore/HardwareObjects/abstract/AbstractSampleView.py index 69d972740a..bca5955d3e 100644 --- a/mxcubecore/HardwareObjects/abstract/AbstractSampleView.py +++ b/mxcubecore/HardwareObjects/abstract/AbstractSampleView.py @@ -36,7 +36,7 @@ class AbstractSampleView(HardwareObject): def __init__(self, name): super().__init__(name) - self._camera = None + self.camera = None self._focus = None self._zoom = None self._frontlight = None @@ -70,14 +70,6 @@ def save_scene_animation(self, filename, duration=1): duration (int): Duration time [s]. """ - @property - def camera(self): - """Get camera object. - Returns: - (AbstractCamera): Camera hardware object. - """ - return self._camera - @property def shapes(self): """Get shapes dict. 
From 6af0b18859a8d29643330d4cc4952664966d65a3 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 15 Aug 2024 09:59:30 +0200 Subject: [PATCH 31/53] refactor initialization code of MDCameraMockup The 'image_name' configuration property is not available in _init() method. Move set-up of 'static' attributes into __init__() method. Move set-up that requires access to configuration properties into init() method. --- mxcubecore/HardwareObjects/mockup/MDCameraMockup.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/mxcubecore/HardwareObjects/mockup/MDCameraMockup.py b/mxcubecore/HardwareObjects/mockup/MDCameraMockup.py index 7f6fa90d0e..f68d5c6373 100644 --- a/mxcubecore/HardwareObjects/mockup/MDCameraMockup.py +++ b/mxcubecore/HardwareObjects/mockup/MDCameraMockup.py @@ -19,23 +19,25 @@ class MDCameraMockup(BaseHardwareObjects.HardwareObject): def __init__(self, name): super().__init__(name) - def _init(self): self._format = "MPEG1" self.stream_hash = "abc123" self.udiffVER_Ok = False self.badimg = 0 self.pollInterval = 500 self.connected = False - self.image_name = self.get_property("image_name") - self.image = HWR.get_hardware_repository().find_in_repository(self.image_name) + + def init(self): + logging.getLogger("HWR").info("initializing camera object") + + image_name = self.get_property("image_name") + self.image = HWR.get_hardware_repository().find_in_repository(image_name) self.set_is_ready(True) self._video_stream_process = None self._current_stream_size = "0, 0" - def init(self): - logging.getLogger("HWR").info("initializing camera object") if self.get_property("interval"): self.pollInterval = self.get_property("interval") + self.stopper = False # self.polling_timer(self.pollInterval, self.poll) gevent.spawn(self.poll) From 6bf23251a5edb645fda02f55538a0f3cda63186d Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 15 Aug 2024 11:10:19 +0200 Subject: [PATCH 32/53] make BeamlineActions HWOBJs work with YAML Make it possible to create BeamlineActions derived HWOBJs with ClassName(name=foo) expressions. This is the expression used when loading from a YAML configuration file. 
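
Illustrative sketch (the subclass name is hypothetical): specialisations
need no custom __init__, the inherited signature already matches what the
YAML loader calls:

```python
from mxcubecore.HardwareObjects.BeamlineActions import BeamlineActions

class ExampleActions(BeamlineActions):
    # BeamlineActions.__init__(self, name) is inherited unchanged, which is
    # exactly the ClassName(name="role") call made when loading from YAML
    pass

actions = ExampleActions(name="beamline_actions")
```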
--- mxcubecore/HardwareObjects/BeamlineActions.py | 4 ++-- mxcubecore/HardwareObjects/mockup/BeamlineActionsMockup.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/mxcubecore/HardwareObjects/BeamlineActions.py b/mxcubecore/HardwareObjects/BeamlineActions.py index c7243bbca8..0e6d1cb48f 100644 --- a/mxcubecore/HardwareObjects/BeamlineActions.py +++ b/mxcubecore/HardwareObjects/BeamlineActions.py @@ -164,8 +164,8 @@ def cmd_name(self): class BeamlineActions(HardwareObject): - def __init__(self, *args): - HardwareObject.__init__(self, *args) + def __init__(self, name): + HardwareObject.__init__(self, name) self._annotated_commands = [] self._annotated_command_dict = {} self._command_list = [] diff --git a/mxcubecore/HardwareObjects/mockup/BeamlineActionsMockup.py b/mxcubecore/HardwareObjects/mockup/BeamlineActionsMockup.py index c3211a9668..b814c18c2f 100644 --- a/mxcubecore/HardwareObjects/mockup/BeamlineActionsMockup.py +++ b/mxcubecore/HardwareObjects/mockup/BeamlineActionsMockup.py @@ -71,5 +71,4 @@ def combo_test2(self, data: StringLiteral) -> None: class BeamlineActionsMockup(BeamlineActions): - def __init__(self, *args): - super().__init__(*args) + pass From 7b0c4fb349609235cdeb90ecaad5913c10e4232d Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Wed, 21 Aug 2024 09:13:40 +0200 Subject: [PATCH 33/53] add detector_distance attribute to detector mockup HWOBJ When loading DetectorMockup hardware object from YAML configuration file, you'll get a warning if there is no 'detector_distance' attribute. --- mxcubecore/HardwareObjects/mockup/DetectorMockup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mxcubecore/HardwareObjects/mockup/DetectorMockup.py b/mxcubecore/HardwareObjects/mockup/DetectorMockup.py index 03008545d7..3632cfaddd 100644 --- a/mxcubecore/HardwareObjects/mockup/DetectorMockup.py +++ b/mxcubecore/HardwareObjects/mockup/DetectorMockup.py @@ -18,6 +18,9 @@ def __init__(self, name): """ AbstractDetector.__init__(self, name) + # 'slot' for child object + self.detector_distance = None + def init(self): """ Descript. : From 7004ff641ddcc8fc3a52c0c6edefbbbd871055f4 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 12:22:19 +0200 Subject: [PATCH 34/53] tests: remove test on HardwareObjectNode.__getattr__ method The HardwareObjectNode.__getattr__ method have been removed. --- test/pytest/test_base_hardware_objects.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index 80654e0f10..bb7fe80bac 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -384,26 +384,6 @@ def test_len( # Check "len" matches expected count assert len(hw_obj_node) == count - def test_getattr(self, hw_obj_node: HardwareObjectNode): - """Test "__getattr__" method. - - Args: - hw_obj_node (HardwareObjectNode): Object instance. - """ - - # Attempt to access attribute starting with "__", should raise an exception - with pytest.raises(AttributeError): - hw_obj_node.__name - - # Check retrieving an attrubute that has been assigned to "_property_set" - hw_obj_node._property_set["test"] = 1 - assert getattr(hw_obj_node, "test") == 1 - - # Check attempting to access an attribute, - # before it has been assigned to "_property_set" - with pytest.raises(AttributeError): - getattr(hw_obj_node, "test2") - def test_setattr(self, hw_obj_node: HardwareObjectNode): """Test "__setattr__" method. 
From cfe658b974c80078a34d9e57a9c4c36237781282 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 12:28:05 +0200 Subject: [PATCH 35/53] tests: remove checks for outdated HardwareObjectNode.__setattr__() behaviour The logic for where the value of setattr() ends up works differenly now. Remove the test part that was checking the old behaviour. --- test/pytest/test_base_hardware_objects.py | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index bb7fe80bac..6bcb15a058 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -398,19 +398,7 @@ def test_setattr(self, hw_obj_node: HardwareObjectNode): assert "test1" in hw_obj_node.__dict__.keys() assert getattr(hw_obj_node, "test1") == 1 assert "test1" not in hw_obj_node._property_set.keys() - - # Assign a key/value to "_property_set" - hw_obj_node._property_set["test2"] = 0 - - # Set a new value against the value we just added - setattr(hw_obj_node, "test2", 1) - - # Check that the key/value was not assigned to "__dict__" - assert "test2" not in hw_obj_node.__dict__.keys() - - # Check that the value returned is correct - assert getattr(hw_obj_node, "test2") == 1 - assert hw_obj_node._property_set["test2"] == 1 + assert hw_obj_node.test1 == 1 @pytest.mark.parametrize( "key", From a95a77fe7feb39b480962d704cf31bba30e59fce Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 12:33:35 +0200 Subject: [PATCH 36/53] tests: remove test on HardwareObjectNode.has_object() method The HardwareObjectNode.has_object() have been removed. --- test/pytest/test_base_hardware_objects.py | 38 ----------------------- 1 file changed, 38 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index 6bcb15a058..d1af44c66c 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -747,44 +747,6 @@ def test_add_object( # Index key in "_objects_names" should point to last item in "_objects" assert _objects_names.index(name) == len(_objects) - 1 - @pytest.mark.parametrize( - ("name", "initial_obj_names", "in_names"), - ( - ("key1", ["key1", "key2", "key3"], True), - ("key4", ["key1", "key2", "key3"], False), - ), - ) - def test_has_object( - self, - mocker: "MockerFixture", - hw_obj_node: HardwareObjectNode, - name: str, - initial_obj_names: List[str], - in_names: bool, - ): - """Test "has_object" method. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - hw_obj_node (HardwareObjectNode): Object instance. - name (str): Name. - initial_obj_names (List[str]): Initial object names. - in_names (bool): Result expected from method. 
- """ - - # Patch "__objects_names" to test with known values - mocker.patch.object( - hw_obj_node, - "_HardwareObjectNode__objects_names", - new=initial_obj_names, - ) - - # Call method - res = hw_obj_node.has_object(object_name=name) - - # Check result matches expectations - assert res == in_names - @pytest.mark.parametrize("name", ("key1", "key2", "key3", "key4")) @pytest.mark.parametrize( ("initial_obj_names", "initial_objects"), From 1aa983e710cd7b512941368301fd6d5335f99b8c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 15:13:03 +0200 Subject: [PATCH 37/53] tests: move print_log() method test to new class The print_log() method moved from HardwareObjectNode class to ConfiguredObject, update the test accordingly. --- test/pytest/test_base_hardware_objects.py | 86 +++++++++++------------ 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index d1af44c66c..72475428d7 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -114,6 +114,49 @@ def test_configured_object_setup(self, configured_object: ConfiguredObject): ConfiguredObject, ) + @pytest.mark.parametrize( + "level", + ( + "debug", + "error", + "warning", + "info", + "flange", + ), + ) + def test_print_log( + self, + mocker: "MockerFixture", + configured_object: ConfiguredObject, + level: str, + ): + """Test "print_log" method. + + Args: + mocker (MockerFixture): Instance of the Pytest mocker fixture. + hw_obj_node (HardwareObjectNode): Object instance. + level (str): Logging level. + """ + + # Patch "logging.getLogger" to intercept calls + logger_patch = MagicMock(spec=Logger) + get_logger_patch = mocker.patch("logging.getLogger", return_value=logger_patch) + + _log_type = f"{level.upper()}_TEST" + _message = f"Test {level.capitalize()} Entry." + + # Call method, output is always going to be "None" + configured_object.print_log(log_type=_log_type, level=level, msg=_message) + + # All tests should make at least one call to patched "logging.getLogger" + get_logger_patch.assert_called_with(*(_log_type,)) + + logger_level_patch: Union[MagicMock, None] = getattr(logger_patch, level, None) + if logger_level_patch is not None: + # If the logging level exists, check that it was called with our message + logger_level_patch: MagicMock + logger_level_patch.assert_called_once_with(*(_message,)) + class TestPropertySet: """Run tests for "PropertySet" class""" @@ -1031,49 +1074,6 @@ def test_get_properties( # Call method and verify output matches initial values assert hw_obj_node.get_properties() == initial_properties - @pytest.mark.parametrize( - "level", - ( - "debug", - "error", - "warning", - "info", - "flange", - ), - ) - def test_print_log( - self, - mocker: "MockerFixture", - hw_obj_node: HardwareObjectNode, - level: str, - ): - """Test "print_log" method. - - Args: - mocker (MockerFixture): Instance of the Pytest mocker fixture. - hw_obj_node (HardwareObjectNode): Object instance. - level (str): Logging level. - """ - - # Patch "logging.getLogger" to intercept calls - logger_patch = MagicMock(spec=Logger) - get_logger_patch = mocker.patch("logging.getLogger", return_value=logger_patch) - - _log_type = f"{level.upper()}_TEST" - _message = f"Test {level.capitalize()} Entry." 
- - # Call method, output is always going to be "None" - hw_obj_node.print_log(log_type=_log_type, level=level, msg=_message) - - # All tests should make at least one call to patched "logging.getLogger" - get_logger_patch.assert_called_with(*(_log_type,)) - - logger_level_patch: Union[MagicMock, None] = getattr(logger_patch, level, None) - if logger_level_patch is not None: - # If the logging level exists, check that it was called with our message - logger_level_patch: MagicMock - logger_level_patch.assert_called_once_with(*(_message,)) - class TestHardwareObjectMixin: """Run tests for "HardwareObjectMixin" class""" From 485e364f04a2b4fcf02c1deb71b5e6e2a512824d Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 15:14:02 +0200 Subject: [PATCH 38/53] tests: use new method name of HardwareObjectNode._objects_names() The HardwareObjectNode.objects_names() method have been renamed to HardwareObjectNode._objects_names(), update the test accordingly. --- test/pytest/test_base_hardware_objects.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index 72475428d7..a99bb7f8db 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -946,7 +946,7 @@ def test_objects_names( hw_obj_node: HardwareObjectNode, initial_obj_names: List[str], ): - """Test "objects_names" method. + """Test "_objects_names" method. Args: mocker (MockerFixture): Instance of the Pytest mocker fixture. @@ -962,7 +962,7 @@ def test_objects_names( ) # Call method and verify output matches initial values - assert hw_obj_node.objects_names() == initial_obj_names + assert hw_obj_node._objects_names() == initial_obj_names @pytest.mark.parametrize( ("name", "value", "output_value"), From 66099bdd1d1fe4df1afbf1bc6b2f73507c6e5e67 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 15:21:14 +0200 Subject: [PATCH 39/53] tests: update a couple of BaseHardwareObjects tests * the HardwareObjectYaml does not take 'name' named attribute anymore * method 'name()' has changed to attribute 'name' --- test/pytest/test_base_hardware_objects.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/test/pytest/test_base_hardware_objects.py b/test/pytest/test_base_hardware_objects.py index a99bb7f8db..b604683206 100644 --- a/test/pytest/test_base_hardware_objects.py +++ b/test/pytest/test_base_hardware_objects.py @@ -95,7 +95,7 @@ def hw_obj_yml() -> Generator[HardwareObjectYaml, None, None]: Generator[HardwareObjectYaml, None, None]: New object instance. """ - hw_obj_yml = HardwareObjectYaml(name="RootObject") + hw_obj_yml = HardwareObjectYaml("RootObject") yield hw_obj_yml @@ -928,9 +928,7 @@ def test_get_object_by_role( if role.lower() in hw_obj_node._objects_by_role: assert res == hw_obj_node._objects_by_role[role.lower()] elif not None in _objects and sub_obj_role: - assert isinstance(res, HardwareObject) and res.name() == "TestHWObj3" - # else: - # assert res is None + assert isinstance(res, HardwareObject) and res.name == "TestHWObj3" @pytest.mark.parametrize( "initial_obj_names", From a859ce838c5d03c29984cb01f6f2304dfca6d6ee Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 17:38:46 +0200 Subject: [PATCH 40/53] tests: fix procedure tests The 'mock_procedure' HWOBJ is attached directly to the beamline object. 
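
Sketch of the updated access path (the pytest 'beamline' fixture is
assumed): the procedure is a child hardware object, so it is read straight
off the beamline object instead of going through beamline.config:

```python
def test_mock_procedure_is_attached(beamline):
    procedure = beamline.mock_procedure   # was: beamline.config.mock_procedure
    assert procedure is not None
```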
--- test/pytest/test_procedure.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/pytest/test_procedure.py b/test/pytest/test_procedure.py index 1bf7df1de6..da076299eb 100644 --- a/test/pytest/test_procedure.py +++ b/test/pytest/test_procedure.py @@ -6,7 +6,7 @@ def test_procedure_init(beamline): assert ( - beamline.config.mock_procedure is not None + beamline.mock_procedure is not None ), "MockProcedure hardware objects is None (not initialized)" # The methods are defined with abc.abstractmethod which will raise # an exception if the method is not defined. So there is no need to test for @@ -15,17 +15,17 @@ def test_procedure_init(beamline): def test_procedure_start(beamline): data = procedure_model.MockDataModel(**{"exposure_time": 5}) - beamline.config.mock_procedure.start(data) + beamline.mock_procedure.start(data) gevent.sleep(1) - assert beamline.config.mock_procedure.state == ProcedureState.BUSY - beamline.config.mock_procedure.wait() - assert beamline.config.mock_procedure.state == ProcedureState.READY + assert beamline.mock_procedure.state == ProcedureState.BUSY + beamline.mock_procedure.wait() + assert beamline.mock_procedure.state == ProcedureState.READY def test_procedure_stop(beamline): data = procedure_model.MockDataModel(**{"exposure_time": 5}) - beamline.config.mock_procedure.start(data) + beamline.mock_procedure.start(data) gevent.sleep(1) - assert beamline.config.mock_procedure.state == ProcedureState.BUSY - beamline.config.mock_procedure.stop() - assert beamline.config.mock_procedure.state == ProcedureState.READY + assert beamline.mock_procedure.state == ProcedureState.BUSY + beamline.mock_procedure.stop() + assert beamline.mock_procedure.state == ProcedureState.READY From fbaf2cb58e55d6a978569a5cbbf4588433d1918e Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 17:44:26 +0200 Subject: [PATCH 41/53] tests: update test YAML configs format Use the new YAML configuration file format for the files used by the tests. --- mxcubecore/configuration/mockup/procedure-mockup.yml | 5 +---- mxcubecore/configuration/mockup/test/beamline_config.yml | 8 +++----- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/mxcubecore/configuration/mockup/procedure-mockup.yml b/mxcubecore/configuration/mockup/procedure-mockup.yml index 4df2475f85..4f750f9429 100644 --- a/mxcubecore/configuration/mockup/procedure-mockup.yml +++ b/mxcubecore/configuration/mockup/procedure-mockup.yml @@ -1,6 +1,3 @@ %YAML 1.2 --- - -# The class to initialise, and init parameters -_initialise_class: - class: mxcubecore.HardwareObjects.mockup.ProcedureMockup.ProcedureMockup +class: mxcubecore.HardwareObjects.mockup.ProcedureMockup.ProcedureMockup diff --git a/mxcubecore/configuration/mockup/test/beamline_config.yml b/mxcubecore/configuration/mockup/test/beamline_config.yml index f5d6a7752d..2d12cb3622 100644 --- a/mxcubecore/configuration/mockup/test/beamline_config.yml +++ b/mxcubecore/configuration/mockup/test/beamline_config.yml @@ -2,10 +2,8 @@ --- # The class to initialise, and init parameters -_initialise_class: - class: mxcubecore.HardwareObjects.Beamline.Beamline - # Further key-value pairs here will be passed to the class init - # mode: devel +class: + mxcubecore.HardwareObjects.Beamline.Beamline # objects # @@ -15,7 +13,7 @@ _initialise_class: # NBNB some objects that do not currently have their own config files # would need those added (e.g. 
the centring methods) # -_objects: +objects: # The !!o0map and the lines starting with '- ' give you an *ordered* dictionary # And thus a reproducible loading order !!omap From 08662951e7f7c91310623dcda1c350671d700c46 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 16 Aug 2024 17:52:27 +0200 Subject: [PATCH 42/53] tests: fix a command container test The 'name' attribute have been replaced by 'id'. --- test/pytest/test_command_container.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/pytest/test_command_container.py b/test/pytest/test_command_container.py index a543d5ea88..c0630ea8f2 100644 --- a/test/pytest/test_command_container.py +++ b/test/pytest/test_command_container.py @@ -836,8 +836,8 @@ def test_get_channel_object( optional (bool): Whether an error should be logged where no result is returned. """ - # Patch "name" to test in isolation - mocker.patch.object(cmd_container, "name", create=True, return_value="test") + # Patch "id" to test in isolation + mocker.patch.object(cmd_container, "id", create=True, return_value="test") # Patch "__channels" with known values to test mocker.patch.object( From e06cb357905d9a7034f75f284533fb0869152c41 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Tue, 3 Sep 2024 16:38:32 +0200 Subject: [PATCH 43/53] tests: update MAXIV MachInfo tests Update fixture code of MAXIV MachInfo HWOBJ tests. Replace calls to set_property() with direct creation of _config attribute. Using set_property() does not work anymore. --- test/pytest/test_hwo_maxiv_mach_info.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/test/pytest/test_hwo_maxiv_mach_info.py b/test/pytest/test_hwo_maxiv_mach_info.py index 28de2c8d56..91f27c2ee5 100644 --- a/test/pytest/test_hwo_maxiv_mach_info.py +++ b/test/pytest/test_hwo_maxiv_mach_info.py @@ -78,14 +78,12 @@ def mach_info(): dev_ctx.start() mach_info = MachInfo("/machine_info") - mach_info.set_property( - "parameters", - "['current', 'fillmode', 'message', 'lifetime', 'injection', 'status']", - ) - mach_info.set_property( - "mach_info", dev_ctx.get_device_access("test/device/billboard") + + mach_info._config = mach_info.HOConfig( + mach_info=dev_ctx.get_device_access("test/device/billboard"), + current=dev_ctx.get_device_access("test/device/dcct"), + parameters="['current', 'fillmode', 'message', 'lifetime', 'injection', 'status']", ) - mach_info.set_property("current", dev_ctx.get_device_access("test/device/dcct")) # listen for 'valueChanged' signal signal_sent = Event() From 1ba2bc2f252aec0f452e8d19245e1d168f010b28 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Wed, 4 Sep 2024 14:37:30 +0200 Subject: [PATCH 44/53] make a couple of mock HWOBJs work with YAML Make it possible to create XRFMockup and CatsMaintMockup derived HWOBJs with ClassName(name=foo) expressions. This is the expression used when loading from a YAML configuration file. 
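
The same porting pattern, sketched for an arbitrary (hypothetical) mockup
class:

```python
from mxcubecore.BaseHardwareObjects import HardwareObject

class ExampleMockup(HardwareObject):
    def __init__(self, name):
        # accept exactly one 'name' argument and pass it up, so that the
        # loader's ClassName(name="role") call works for YAML configurations
        super().__init__(name)

    def init(self):
        # configuration properties become available here
        self.counts = self.get_property("counts", 0)
```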
--- mxcubecore/HardwareObjects/mockup/CatsMaintMockup.py | 4 ++-- mxcubecore/HardwareObjects/mockup/XRFMockup.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/mxcubecore/HardwareObjects/mockup/CatsMaintMockup.py b/mxcubecore/HardwareObjects/mockup/CatsMaintMockup.py index 930ef537a1..3bf12ad7d5 100644 --- a/mxcubecore/HardwareObjects/mockup/CatsMaintMockup.py +++ b/mxcubecore/HardwareObjects/mockup/CatsMaintMockup.py @@ -43,8 +43,8 @@ class CatsMaintMockup(HardwareObject): BESSY BL14.1 installation with 3 lids """ - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, name): + super().__init__(name) self._state = "READY" self._running = 0 diff --git a/mxcubecore/HardwareObjects/mockup/XRFMockup.py b/mxcubecore/HardwareObjects/mockup/XRFMockup.py index aa998368a3..9c946a5a61 100644 --- a/mxcubecore/HardwareObjects/mockup/XRFMockup.py +++ b/mxcubecore/HardwareObjects/mockup/XRFMockup.py @@ -11,6 +11,9 @@ class XRFMockup(HardwareObject): + def __init__(self, name): + super().__init__(name) + def init(self): self.ready_event = gevent.event.Event() self.spectrumInfo = {} From 666e4dee66c55b5d7930e4d87a6c22a0eb1f9d78 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 16 Sep 2024 12:11:54 +0200 Subject: [PATCH 45/53] docs: add section on removed set_property() Adds a section on removed set_property() method to 'YAML configuration migration' guide. --- docs/source/dev/yaml_conf_migration.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/source/dev/yaml_conf_migration.md b/docs/source/dev/yaml_conf_migration.md index 8587c9eae8..474a3a457e 100644 --- a/docs/source/dev/yaml_conf_migration.md +++ b/docs/source/dev/yaml_conf_migration.md @@ -186,3 +186,11 @@ class Shanxi(HardwareObject): def __init__(self, name): super().__init__(name) ``` + +## `set_property()` method removed + +The hardware object method `set_property()` has been removed. +It is no longer possible to set hardware object configuration properties from python code. +For static properties, move them to the hardware object configuration file. +If your code is setting properties dynamically, +you need to refactor the code to not rely on this deprecated feature. From 1c741e4795e9a381c67e0274b915d701f4d9c762 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 12 Sep 2024 14:29:22 +0200 Subject: [PATCH 46/53] port CentringMath HWOBJ to work with the yaml config API changes Don't use [] expression to read config properties. Renamed attributes self.cameraAxes and self.gonioAxes, to self._camera_axes and self._gonio_axes, as they now collide with the 'auto created' properties on the ConfiguredObject superclass. Use calls to self.get_property() when populating these attributes. 
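
For illustration, a sketch of the data shape that get_property("gonioAxes")
is expected to return for such a nested axis configuration (the axis names
and values below are made up):

```python
gonio_axes_property = {
    "axis": [
        {
            "type": "rotation",
            "direction": "[0, 1, 0]",
            "motorname": "omega",
            "motorHO": "/omega",
        },
        {
            "type": "translation",
            "direction": "[1, 0, 0]",
            "motorname": "phiy",
            "motorHO": "/phiy",
        },
    ]
}

# which the new helpers in init() iterate as:
for axis in gonio_axes_property.get("axis", []):
    print(axis["motorname"], axis["type"])
```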
--- mxcubecore/HardwareObjects/CentringMath.py | 80 +++++++++++++--------- 1 file changed, 49 insertions(+), 31 deletions(-) diff --git a/mxcubecore/HardwareObjects/CentringMath.py b/mxcubecore/HardwareObjects/CentringMath.py index 5f6b825392..dcac03be95 100644 --- a/mxcubecore/HardwareObjects/CentringMath.py +++ b/mxcubecore/HardwareObjects/CentringMath.py @@ -12,36 +12,54 @@ class CentringMath(Procedure): CentringMath procedure """ + def __init__(self, name: str): + super().__init__(name) + self._gonio_axes = [] + self._camera_axes = [] + def init(self): """ Ggonio axes definitions are static motorHO is expected to have get_value() that returns coordinate in mm """ - self.motorConstraints = [] - self.gonioAxes = [] - for axis in self["gonioAxes"]: - self.gonioAxes.append( - { - "type": axis.type, - "direction": eval(axis.direction), - "motor_name": axis.motorname, + + def get_axis_property(property_name: str) -> list: + # + # get 'axis' key from the specified config property + # + # treat the property as optional, if not specified, + # return an empty list + # + return self.get_property(property_name, {}).get("axis", []) + + def get_gonio_axes(): + for axis in get_axis_property("gonioAxes"): + yield { + "type": axis["type"], + "direction": eval(axis["direction"]), + "motor_name": axis["motorname"], "motor_HO": HWR.get_hardware_repository().get_hardware_object( - axis.motorHO + axis["motorHO"] ), } - ) + + def get_camera_axes(): + for axis in get_axis_property("cameraAxes"): + yield { + "axis_name": axis["axisname"], + "direction": eval(axis["direction"]), + } + + self.motorConstraints = [] + self._gonio_axes = list(get_gonio_axes()) """ This version is lacking video microscope object. Therefore we model only static camera axes directions, but no camera axes scaling or center - which are dynamic. Therefore, camera coordinates are relative, in mm. 
""" - self.cameraAxes = [] - for axis in self["cameraAxes"]: - self.cameraAxes.append( - {"axis_name": axis.axisname, "direction": eval(axis.direction)} - ) + self._camera_axes = list(get_camera_axes()) self.mI = numpy.diag([1.0, 1.0, 1.0]) # identity matrix self.calibrate() @@ -95,13 +113,13 @@ def centeredPosition(self, return_by_name=False): for l in range(0, self.translationAxesCount): for i in range(0, len(self.centringDataMatrix)): - for k in range(0, len(self.cameraAxes)): + for k in range(0, len(self._camera_axes)): V[l] += ( self.centringDataTensor[i][l][k] * self.centringDataMatrix[i][k] ) for m in range(0, self.translationAxesCount): for i in range(0, len(self.centringDataMatrix)): - for k in range(0, len(self.cameraAxes)): + for k in range(0, len(self._camera_axes)): M[l][m] += ( self.centringDataTensor[i][l][k] * self.centringDataTensor[i][m][k] @@ -131,10 +149,10 @@ def apply_constraints(self, M, tau): def factor_matrix(self): # This should be connected to goniostat rotation datum update, with F globalized - F = numpy.zeros(shape=(self.translationAxesCount, len(self.cameraAxes))) + F = numpy.zeros(shape=(self.translationAxesCount, len(self._camera_axes))) R = self.mI j = 0 - for axis in self.gonioAxes: # skip base gonio axis + for axis in self._gonio_axes: # skip base gonio axis if axis["type"] == "rotation": Ra = self.rotation_matrix( axis["direction"], axis["motor_HO"].get_value() @@ -143,7 +161,7 @@ def factor_matrix(self): elif axis["type"] == "translation": f = numpy.dot(R, axis["direction"]) k = 0 - for camera_axis in self.cameraAxes: + for camera_axis in self._camera_axes: F[j][k] = numpy.dot(f, camera_axis["direction"]) k += 1 j += 1 @@ -151,7 +169,7 @@ def factor_matrix(self): def calibrate(self): count = 0 - for axis in self.gonioAxes: # make first gonio rotation matrix for base axis + for axis in self._gonio_axes: # make first gonio rotation matrix for base axis if axis["type"] == "rotation": d = axis["direction"] axis["mT"] = numpy.outer(d, d) @@ -163,7 +181,7 @@ def calibrate(self): count += 1 self.translationAxesCount = count count = 0 - for axis in self.cameraAxes: + for axis in self._camera_axes: axis["index"] = count count += 1 @@ -179,7 +197,7 @@ def rotation_matrix(self, dir, angle): def translation_datum(self): vector = [] - for axis in self.gonioAxes: + for axis in self._gonio_axes: if axis["type"] == "translation": vector.append(axis["motor_HO"].get_value()) return vector @@ -187,7 +205,7 @@ def translation_datum(self): def centred_positions_to_vector(self, centrings_dictionary): vector = numpy.zeros(shape=(self.translationAxesCount)) index = 0 - for axis in self.gonioAxes: + for axis in self._gonio_axes: if axis["type"] == "translation": motname = axis["motor_name"] if centrings_dictionary[motname] is not None: @@ -200,7 +218,7 @@ def centred_positions_to_vector(self, centrings_dictionary): def vector_to_centred_positions(self, vector, return_by_name=False): dic = {} index = 0 - for axis in self.gonioAxes: + for axis in self._gonio_axes: if axis["type"] == "translation": if return_by_name: dic[axis["motor_name"]] = vector[index] @@ -211,16 +229,16 @@ def vector_to_centred_positions(self, vector, return_by_name=False): def camera_coordinates_to_vector(self, camera_coordinates_dictionary): vector = [] - for index in range(0, len(self.cameraAxes)): + for index in range(0, len(self._camera_axes)): vector.append( - camera_coordinates_dictionary[self.cameraAxes[index]["axis_name"]] + camera_coordinates_dictionary[self._camera_axes[index]["axis_name"]] ) 
return vector def vector_to_camera_coordinates(self, vector): dic = {} index = 0 - for axis in self.cameraAxes: + for axis in self._camera_axes: dic[axis["axis_name"]] = vector[index] index += 1 return dic @@ -228,7 +246,7 @@ def vector_to_camera_coordinates(self, vector): def appendMotorConstraint(self, motor_HO, position): index = 0 self.motorConstraints = [] - for axis in self.gonioAxes: + for axis in self._gonio_axes: if axis["type"] == "translation" and motor_HO is axis["motor_HO"]: index += 1 self.motorConstraints.append( @@ -240,10 +258,10 @@ def camera2alignmentMotor(self, motor_HO, camxy): # motor_HO must reference an ALIGNMENT motor! # finds a projection of camera vector {"X":x,"Y":y} onto a motor axis of a # motor_HO - for axis in self.gonioAxes: + for axis in self._gonio_axes: if axis["type"] == "translation" and motor_HO is axis["motor_HO"]: res = 0.0 - for camaxis in self.cameraAxes: + for camaxis in self._camera_axes: res = ( res + numpy.dot(axis["direction"], camaxis["direction"]) From f392ad590328f242320749f6857e4ace8e10997c Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Thu, 3 Oct 2024 10:07:17 +0200 Subject: [PATCH 47/53] export YAML configuration to '.yaml' files When 'export yaml configuration' option is invoked, write configurations to '*.yaml' files. This is the cannonical file extension for YAML files. --- mxcubecore/HardwareRepository.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index c7a694aa9b..c7d6317a18 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -275,7 +275,7 @@ def write_yaml(data, file_name: str): objects = result["objects"] = {} for role, obj in objects_by_role.items(): try: - objects[role] = "%s.yml" % obj.id + objects[role] = "%s.yaml" % obj.id except: logging.getLogger("HWR").exception("") @@ -284,7 +284,7 @@ def write_yaml(data, file_name: str): if tag not in objects_by_role: config[tag] = val - write_yaml(result, "%s.yml" % hwobj.id) + write_yaml(result, "%s.yaml" % hwobj.id) def _attach_xml_objects(yaml_export_directory: Optional[Path], container, hwobj, role): From 043d2e0609e64bb2bb435be6cd9d3ad24ea88fd8 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Tue, 15 Oct 2024 13:28:56 +0200 Subject: [PATCH 48/53] docs: add section on deprecated tag Adds section ' tag no longer supported' to 'XML to YAML config migration document'. --- docs/source/dev/yaml_conf_migration.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/docs/source/dev/yaml_conf_migration.md b/docs/source/dev/yaml_conf_migration.md index 474a3a457e..e674fd6cb5 100644 --- a/docs/source/dev/yaml_conf_migration.md +++ b/docs/source/dev/yaml_conf_migration.md @@ -194,3 +194,28 @@ It is no longer possible to set hardware object configuration properties from py For static properties, move them to the hardware object configuration file. If your code is setting properties dynamically, you need to refactor the code to not rely on this deprecated feature. + +## `` tag no longer supported + +Previously it was possible to configure a hardware object's command and channels using this style: + +```xml + + some/tango/device + Open + State + +``` + +Above the tango device for commands and channels is specified with `` tag. +The `` tag is no longer supported and is ignored. +The tango device must be specified in each individual `` and `` tag using the `tangoname` attribute. 
+ +The above example should be converted to the following style: + +```xml + + Open + State + +``` From 28f6cf669ea5b94073c5600fadbf07bbe488f4a4 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 14 Oct 2024 09:15:01 +0200 Subject: [PATCH 49/53] restructure slightly the docs strings in CommandContainer module This way, it's possible to link to generated API documentation for CommandContainer and ChannelObject classes. Without this change, sphix refuses to create a link for CommandContainer and ChannelObject classes, from other section of the documentation. --- mxcubecore/CommandContainer.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/mxcubecore/CommandContainer.py b/mxcubecore/CommandContainer.py index 306e660065..35899aaf97 100644 --- a/mxcubecore/CommandContainer.py +++ b/mxcubecore/CommandContainer.py @@ -18,15 +18,6 @@ # You should have received a copy of the GNU Lesser General Public License # along with MXCuBE. If not, see . -"""CommandContainer module - -Classes: -- CommandContainer, a special mixin class to be used with -Hardware Objects. It defines a container -for command launchers and channels (see Command package). -- C*Object, command launcher & channel base class -""" - from __future__ import absolute_import import logging @@ -59,6 +50,8 @@ class ConnectionError(Exception): class CommandObject: + """Command launcher base class""" + def __init__(self, name: str, username: Optional[str] = None, **kwargs) -> None: """ Args: @@ -178,6 +171,8 @@ def is_connected(self) -> bool: class ChannelObject: + """Channel base class""" + def __init__(self, name: str, username: Optional[str] = None, **kwargs) -> None: """ Args: From 7f71d38223589510492a9872d522d9e4fcdd4c2e Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 27 Sep 2024 14:44:55 +0200 Subject: [PATCH 50/53] add docs on commands and channels YAML configuration format Adds a section the describes how to configure hardware object's commands and channels using YAML configuration files. Documents the format for Tango, exporter and EPICS protocols. --- docs/source/dev/commands_channels.md | 297 +++++++++++++++++++++++++ docs/source/dev/configuration_files.md | 23 +- docs/source/dev/index.rst | 11 +- 3 files changed, 322 insertions(+), 9 deletions(-) create mode 100644 docs/source/dev/commands_channels.md diff --git a/docs/source/dev/commands_channels.md b/docs/source/dev/commands_channels.md new file mode 100644 index 0000000000..51d0878c30 --- /dev/null +++ b/docs/source/dev/commands_channels.md @@ -0,0 +1,297 @@ +# Commands and Channels + +The mxcubecore provides a hardware object-level abstraction +for communicating using various flavors of control system protocols. +A hardware object can utilize instances of +[`CommandObject`](#mxcubecore.CommandContainer.CommandObject) and +[`ChannelObject`](#mxcubecore.CommandContainer.ChannelObject) objects. +These objects provide a uniform API for accessing a specific control system. +Mxcubecore provides support for using protocols such as +[_Tango_](https://www.tango-controls.org/), +[_EPICS_](https://docs.epics-controls.org), +_exporter_ and more. + +The `CommandObject` and `ChannelObject` objects can be created using the `add_command()` and `add_channel()` methods of hardware objects. +Another option is to specify them in the hardware object's configuration file. +If `CommandObject` and `ChannelObject` are specified in the configuration file, +the specified objects will be automatically created during hardware object initialization. 
+ +## Configuration files format + +The general format for specifying `CommandObject` and `ChannelObject` objects is as follows: + +```yaml +class: +: # protocol in use, tango / exporter / epics / etc + : # tango device / exporter address / EPICS prefix / etc + commands: + : # command's MXCuBE name + : + : + channels: + : # channel's MXCuBE Name + : + : +``` + +The `CommandObject` and `ChannelObject` specification are grouped by the protocol they are using. +Each protocol have its own dedicated section in the configuration file. +The semantics for the protocol are similar but protocol-specific, see below for details. + +Currently, the following protocols can be configured using YAML configuration files: + + - [Tango](#tango-protocol) + - [exporter](#exporter-protocol) + - [EPICS](#epics-protocol) + +## Tango Protocol + +The format for specifying _tango_ `CommandObject` and `ChannelObject` objects is as follows: + +```yaml +class: +tango: + : + commands: + : + : + : + : + channels: + : + : + : + : + : +``` + +`` specifies the tango device to use. +Multiple `` sections can be specified, in order to use different tango devices. +Each `` contains optional `commands` and `channels` sections. +These sections specify `CommandObject` and `ChannelObject` object to create using the `` tango device. + +### Commands + +`commands` is a dictionary where each key specifies a `CommandObject` object. +The key defines the MXCuBE name for the command. +The values specify an optional dictionary with configuration properties for the `CommandObject` object. +The following configuration properties are supported: + +| property | purpose | default | +|----------|--------------------|---------------------| +| name | tango command name | MXCuBE command name | + +### Channels + +`channels` is a dictionary where each key specifies a `ChannelObject` object. +The key defines the MXCuBE name for the channel. +The values specify an optional dictionary with configuration properties for the `ChannelObject` object. +The following configuration properties are supported: + +| property | purpose | default | +|----------------|------------------------------------|---------------------| +| attribute | tango attribute name | MXCuBE channel name | +| polling_period | polling periodicity, milliseconds | polling is disabled | +| timeout | tango device timeout, milliseconds | 10000 | + +By default, a tango `ChannelObject` object will use tango attribute change event, in order to receive new attribute values. +For this to work, the tango device must send the change events for the attribute. +For cases where such events are not sent, the attribute polling can be enabled. +If `polling_period` property is specified, MXCuBE will poll the tango attribute with specified periodicity. + +### Example + +Below is an example of a hardware object that specifies Tango commands and channels. + +```yaml +class: MyTango +tango: + some/tango/device: + commands: + Open: + Close: + Reset: + name: Reboot + channels: + State: + Volume: + attribute: currentVolume + polling_period: 1024 +``` + +In the above example, commands `Open`, `Close` and `Reset` as well as channels `State` and `Volume` are configured. +All command and channel objects are bound to the commands and attributes of the _some/tango/device_ tango device. + +`Open` and `Close` commands are bound to _Open_ and _Close_ Tango commands. +The `Reset` has a configuration property that binds it to _Reboot_ tango command. + +The `State` channel will be mapped to _State_ attribute of the Tango device. 
+Its value will be updated via Tango change events.
+
+The `Volume` channel will be mapped to the _currentVolume_ attribute of the tango device.
+The _currentVolume_ attribute's value will be polled every 1024 milliseconds.
+
+## Exporter Protocol
+
+The format for specifying _exporter_ `CommandObject` and `ChannelObject` objects is as follows:
+
+```yaml
+class: <class>
+exporter:
+  <address>:
+    commands:
+      <command_name>:
+        <property>: <value>
+      <command_name>:
+        <property>: <value>
+    channels:
+      <channel_name>:
+        <property>: <value>
+      <channel_name>:
+        <property>: <value>
+```
+
+`<address>` specifies the exporter address to use.
+Multiple `<address>` sections can be specified to use devices at different addresses.
+Each `<address>` section contains optional `commands` and `channels` sections.
+These sections specify the `CommandObject` and `ChannelObject` objects to create using the exporter device at `<address>`.
+
+`<address>` specifies the exporter's host and port number.
+It has the following format: `<host>:<port>`.
+`<host>` is the host name or IP address to use.
+`<port>` is the TCP port number to use.
+Note that, due to YAML parsing rules, you need to use quotes when specifying the exporter address.
+Below is an example of an exporter address that can be used in a YAML configuration file:
+
+```
+"foo.example.com:9001"
+```
+
+### Commands
+
+`commands` is a dictionary where each key specifies a `CommandObject` object.
+The key defines the MXCuBE name for the command.
+The value is an optional dictionary of configuration properties for the `CommandObject` object.
+The following configuration properties are supported:
+
+| property | purpose               | default             |
+|----------|-----------------------|---------------------|
+| name     | exporter command name | MXCuBE command name |
+
+### Channels
+
+`channels` is a dictionary where each key specifies a `ChannelObject` object.
+The key defines the MXCuBE name for the channel.
+The value is an optional dictionary of configuration properties for the `ChannelObject` object.
+The following configuration properties are supported:
+
+| property  | purpose                 | default             |
+|-----------|-------------------------|---------------------|
+| attribute | exporter attribute name | MXCuBE channel name |
+
+### Example
+
+Below is an example of a hardware object that specifies exporter commands and channels.
+
+```yaml
+class: MyExporter
+exporter:
+  "foo.example.com:9001":
+    commands:
+      Open:
+      Close:
+      Reset:
+        name: Reboot
+    channels:
+      State:
+      Volume:
+        attribute: currentVolume
+```
+
+In the above example, commands `Open`, `Close` and `Reset`, as well as the `State` and `Volume` channels, are configured.
+All command and channel objects are bound to the exporter host _foo.example.com_ at port _9001_.
+
+The `Open` and `Close` commands are bound to the _Open_ and _Close_ exporter commands.
+The `Reset` command has a configuration property that binds it to the _Reboot_ exporter command.
+
+The `State` channel will be mapped to the _State_ exporter attribute.
+The `Volume` channel will be mapped to the _currentVolume_ exporter attribute.
+
+## EPICS Protocol
+
+The format for specifying _EPICS_ `ChannelObject` objects is as follows:
+
+```yaml
+class: EpicsCommunicator
+epics:
+  <prefix>:
+    channels:
+      <channel_name>:
+        <property>: <value>
+        <property>: <value>
+      <channel_name>:
+        <property>: <value>
+```
+
+`<prefix>` specifies the EPICS PV prefix to use for that section.
+Multiple `<prefix>` sections can be specified, in case not all channels share a common prefix.
+Each `<prefix>` section contains a `channels` section, which specifies the `ChannelObject` objects to create.
+
+It is also possible to use the empty string, `""`, as the prefix.
+This is useful in cases where none of the channels share a common prefix.
+See [below](#pv-names) for details on how channel PV names are determined.
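+
+For instance, a configuration along the following lines could use the empty prefix and spell out each PV name in full through the `suffix` property (the channel and PV names below are made up for illustration):
+
+```yaml
+class: EpicsCommunicator
+epics:
+  "":
+    channels:
+      Temperature:
+        suffix: LAB:COOLER:temp.VAL
+      Pressure:
+        suffix: LAB:PUMP:pressure.VAL
+```
+
+With the empty prefix, each channel's PV name is simply its `suffix` value.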
+
+### Channels
+
+`channels` is a dictionary where each key specifies a `ChannelObject` object.
+The key defines the MXCuBE name for the channel.
+The value is an optional dictionary of configuration properties for the `ChannelObject` object.
+The following configuration properties are supported:
+
+| property       | purpose             | default             |
+|----------------|---------------------|---------------------|
+| suffix         | PV name suffix      | MXCuBE channel name |
+| polling_period | polling periodicity | polling is disabled |
+
+#### PV names
+
+The PV name of a channel is determined by concatenating its section's prefix and the specified `suffix`.
+If no suffix is specified, the channel's MXCuBE name is used in place of the `suffix`.
+Consider the example configuration below:
+
+```yaml
+class: EpicsCommunicator
+epics:
+  "FOO:B:":
+    channels:
+      State:
+        suffix: pv_1.STAT
+      Vol:
+        suffix: volume.VAL
+      Freq:
+```
+
+Here we have a `FOO:B:` prefix specified, with channels `State`, `Vol` and `Freq`.
+The `State` channel will use the PV name `FOO:B:pv_1.STAT`, built from the section's prefix and the `suffix` configuration property.
+The `Vol` channel's PV name will be `FOO:B:volume.VAL`.
+The `Freq` channel's PV name becomes `FOO:B:Freq`, built from the section's prefix and the channel's MXCuBE name.
+
+### Example
+
+Below is an example of a hardware object that specifies EPICS channels.
+
+```yaml
+class: EpicsCommunicator
+epics:
+  "MNC:B:PB04.":
+    channels:
+      State:
+      Volume:
+        suffix: vlm
+        polling_period: 512
+```
+
+In the above example, channels `State` and `Volume` are configured.
+The `State` channel will be mapped to the PV name _MNC:B:PB04.State_.
+The `Volume` channel will be mapped to the PV name _MNC:B:PB04.vlm_.
+For the `Volume` channel, polling will be enabled.
diff --git a/docs/source/dev/configuration_files.md b/docs/source/dev/configuration_files.md
index 5d68763e7e..4c403e4e01 100644
--- a/docs/source/dev/configuration_files.md
+++ b/docs/source/dev/configuration_files.md
@@ -59,18 +59,27 @@ and an `init()` function that is executed after configured parameters and contai
 Below is an example YAML configuration file:
 
 ```yaml
-class: ISPyBClientMockup.ISPyBClientMockup
+class: Orkhon.Erdenet
 configuration:
-  base_result_url: https://your.limsresults.org
-  login_type: proposal
+  altai: big
+  choir: 52
 objects:
-  lims_rest: lims_rest.yaml
+  darkhan: darkhan.yaml
   session: session.yaml
+tango:
+  "some/tango/device":
+    commands:
+      Open:
+      Close:
+    channels:
+      State:
 ```
 
-This file specifies a hardware object, which is an instance of the `ISPyBClientMockup` class.
-That object will have two configuration properties `base_result_url` and `login_type`.
-Two child objects with roles `lims_rest` and `session` will be loaded from the specified configuration files.
+This file specifies a hardware object, which is an instance of the `Erdenet` class from the `Orkhon` module.
+That object will have two configuration properties, `altai` and `choir`.
+Two child objects with roles `darkhan` and `session` will be loaded from the specified configuration files.
+The hardware object will also have access to command objects `Open` and `Close`, and a `State` channel object.
+See the [Commands and Channels](commands_channels.md) section for details on how to specify command and channel bindings.
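+
+For illustration, below is a minimal sketch of what the `Erdenet` class itself might look like.
+The class body is hypothetical and only meant to show how the configured pieces typically become available on the hardware object; treat it as a sketch rather than a definitive implementation.
+
+```python
+import logging
+
+from mxcubecore.BaseHardwareObjects import HardwareObject
+
+
+class Erdenet(HardwareObject):
+    """Hypothetical hardware object matching the YAML example above."""
+
+    def init(self):
+        # configuration properties from the `configuration` section
+        altai = self.get_property("altai")  # "big"
+        choir = self.get_property("choir")  # 52
+        logging.getLogger("HWR").info("altai=%s choir=%s", altai, choir)
+
+        # command and channel objects created from the `tango` section
+        open_cmd = self.get_command_object("Open")
+        state_channel = self.get_channel_object("State")
+
+        open_cmd()  # invoke the Open command on the tango device
+        logging.getLogger("HWR").info("state=%s", state_channel.get_value())
+```
+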
### Accessing configuration properties diff --git a/docs/source/dev/index.rst b/docs/source/dev/index.rst index 468358017d..d30dd9c346 100644 --- a/docs/source/dev/index.rst +++ b/docs/source/dev/index.rst @@ -12,7 +12,14 @@ Developer documentation :maxdepth: 2 :titlesonly: - * - + contributing + docs + configuration_files + commands_channels + queue + abstract_classes + api + json-schema-generated-user-interface + yaml_conf_migration .. # EOF From ed87ac84eb83412b66ea95d19dd590e8df9f282e Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Fri, 27 Sep 2024 16:43:00 +0200 Subject: [PATCH 51/53] adds support for configuring commands and channels using YAML Adds support for 'tango', 'exporter' and 'epics' sections to YAML configuration files. These section are used to configure Command and Channel objects for hardware objects. --- mxcubecore/HardwareRepository.py | 3 + mxcubecore/model/protocols/__init__.py | 0 mxcubecore/model/protocols/epics.py | 55 +++ mxcubecore/model/protocols/exporter.py | 79 ++++ mxcubecore/model/protocols/tango.py | 80 ++++ mxcubecore/protocols_config.py | 145 +++++++ test/pytest/data/epics_channels.yaml | 12 + test/pytest/data/exporter_channels.yaml | 11 + test/pytest/data/exporter_commands.yaml | 12 + .../data/exporter_commands_channels.yaml | 19 + test/pytest/data/exporter_duo.yaml | 20 + test/pytest/data/no_commands_channels.yaml | 4 + test/pytest/data/tango_channels.yaml | 11 + test/pytest/data/tango_commands.yaml | 11 + test/pytest/data/tango_commands_channels.yaml | 21 + test/pytest/data/tango_duo.yaml | 20 + test/pytest/test_setup_commands_channels.py | 399 ++++++++++++++++++ 17 files changed, 902 insertions(+) create mode 100644 mxcubecore/model/protocols/__init__.py create mode 100644 mxcubecore/model/protocols/epics.py create mode 100644 mxcubecore/model/protocols/exporter.py create mode 100644 mxcubecore/model/protocols/tango.py create mode 100644 mxcubecore/protocols_config.py create mode 100644 test/pytest/data/epics_channels.yaml create mode 100644 test/pytest/data/exporter_channels.yaml create mode 100644 test/pytest/data/exporter_commands.yaml create mode 100644 test/pytest/data/exporter_commands_channels.yaml create mode 100644 test/pytest/data/exporter_duo.yaml create mode 100644 test/pytest/data/no_commands_channels.yaml create mode 100644 test/pytest/data/tango_channels.yaml create mode 100644 test/pytest/data/tango_commands.yaml create mode 100644 test/pytest/data/tango_commands_channels.yaml create mode 100644 test/pytest/data/tango_duo.yaml create mode 100644 test/pytest/test_setup_commands_channels.py diff --git a/mxcubecore/HardwareRepository.py b/mxcubecore/HardwareRepository.py index c7d6317a18..19beddde13 100644 --- a/mxcubecore/HardwareRepository.py +++ b/mxcubecore/HardwareRepository.py @@ -54,6 +54,7 @@ HardwareObjectFileParser, ) from mxcubecore.dispatcher import dispatcher +from mxcubecore.protocols_config import setup_commands_channels from mxcubecore.utils.conversion import ( make_table, string_types, @@ -184,6 +185,8 @@ def load_from_yaml( # Set configuration with non-object properties. 
result._config = result.HOConfig(**config) + setup_commands_channels(result, configuration) + if _container is None: load_time = 1000 * (time.time() - start_time) msg1 = "Start loading contents:" diff --git a/mxcubecore/model/protocols/__init__.py b/mxcubecore/model/protocols/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/mxcubecore/model/protocols/epics.py b/mxcubecore/model/protocols/epics.py new file mode 100644 index 0000000000..bb456e7a14 --- /dev/null +++ b/mxcubecore/model/protocols/epics.py @@ -0,0 +1,55 @@ +"""Models the `epics` section of YAML hardware configuration file. + +Provides an API to read configured EPICS channels. +""" + +from typing import ( + Dict, + Iterable, + Optional, + Tuple, +) + +from pydantic import ( + BaseModel, + RootModel, +) + + +class Channel(BaseModel): + """EPICS channel configuration.""" + + suffix: Optional[str] = None + polling_period: Optional[int] = None + + +class Prefix(BaseModel): + """Configuration of an EPICS prefix section.""" + + channels: Optional[Dict[str, Optional[Channel]]] + + def get_channels(self) -> Iterable[Tuple[str, Channel]]: + """Get all channels configured for prefix. + + This method will fill in optional configuration properties for a channel. + """ + + if self.channels is None: + return [] + + for channel_name, channel_config in self.channels.items(): + if channel_config is None: + channel_config = Channel() + + if channel_config.suffix is None: + channel_config.suffix = channel_name + + yield channel_name, channel_config + + +class EpicsConfig(RootModel[Dict[str, Prefix]]): + """The 'epics' section of the hardware object's YAML configuration file.""" + + def get_prefixes(self) -> Iterable[Tuple[str, Prefix]]: + """Get all prefixes specified in this section.""" + return list(self.root.items()) diff --git a/mxcubecore/model/protocols/exporter.py b/mxcubecore/model/protocols/exporter.py new file mode 100644 index 0000000000..e773f5a078 --- /dev/null +++ b/mxcubecore/model/protocols/exporter.py @@ -0,0 +1,79 @@ +"""Models the `exporter` section of YAML hardware configuration file. + +Provides an API to read configured exporter channels and commands. +""" + +from typing import ( + Dict, + Iterable, + Optional, + Tuple, +) + +from pydantic import ( + BaseModel, + RootModel, +) + + +class Command(BaseModel): + """Exporter command configuration.""" + + # name of the exporter device command + name: Optional[str] = None + + +class Channel(BaseModel): + """Exporter channel configuration.""" + + attribute: Optional[str] = None + + +class Address(BaseModel): + """Configuration of an exporter end point.""" + + commands: Optional[Dict[str, Optional[Command]]] = None + channels: Optional[Dict[str, Optional[Channel]]] = None + + def get_commands(self) -> Iterable[tuple[str, Command]]: + """Get all commands configured for this exporter address. + + This method will fill in optional configuration properties the commands. + """ + + if self.commands is None: + return [] + + for command_name, command_config in self.commands.items(): + if command_config is None: + command_config = Command() + + if command_config.name is None: + command_config.name = command_name + + yield command_name, command_config + + def get_channels(self) -> Iterable[Tuple[str, Channel]]: + """Get all channels configured for this exporter address. + + This method will fill in optional configuration properties for channels. 
+ """ + if self.channels is None: + return [] + + for channel_name, channel_config in self.channels.items(): + if channel_config is None: + channel_config = Channel() + + if channel_config.attribute is None: + channel_config.attribute = channel_name + + yield channel_name, channel_config + + +class ExporterConfig(RootModel[Dict[str, Address]]): + """The 'exporter' section of the hardware object's YAML configuration file.""" + + def get_addresses(self) -> Iterable[Tuple[str, Address]]: + """Get all exporter addresses specified in this section.""" + return list(self.root.items()) diff --git a/mxcubecore/model/protocols/tango.py b/mxcubecore/model/protocols/tango.py new file mode 100644 index 0000000000..1bea3e793a --- /dev/null +++ b/mxcubecore/model/protocols/tango.py @@ -0,0 +1,80 @@ +"""Models the `tango` section of YAML hardware configuration file. + +Provides an API to read configured tango channels and commands. +""" + +from typing import ( + Dict, + Iterable, + Optional, + Tuple, +) + +from pydantic import ( + BaseModel, + RootModel, +) + + +class Command(BaseModel): + """Tango command configuration.""" + + # name of the tango device command + name: Optional[str] = None + + +class Channel(BaseModel): + """Tango channel configuration.""" + + attribute: Optional[str] = None + polling_period: Optional[int] = None + timeout: Optional[int] = None + + +class Device(BaseModel): + """Configuration of a tango device.""" + + commands: Optional[Dict[str, Optional[Command]]] = None + channels: Optional[Dict[str, Optional[Channel]]] = None + + def get_commands(self) -> Iterable[Tuple[str, Command]]: + """Get all commands configured for this device. + + This method will fill in optional configuration properties for commands. + """ + if self.commands is None: + return [] + + for command_name, command_config in self.commands.items(): + if command_config is None: + command_config = Command() + + if command_config.name is None: + command_config.name = command_name + + yield command_name, command_config + + def get_channels(self) -> Iterable[Tuple[str, Channel]]: + """Get all channels configured for this device. + + This method will fill in optional configuration properties for a channel. + """ + if self.channels is None: + return [] + + for channel_name, channel_config in self.channels.items(): + if channel_config is None: + channel_config = Channel() + + if channel_config.attribute is None: + channel_config.attribute = channel_name + + yield channel_name, channel_config + + +class TangoConfig(RootModel[Dict[str, Device]]): + """The 'tango' section of the hardware object's YAML configuration file.""" + + def get_tango_devices(self) -> Iterable[Tuple[str, Device]]: + """Get all tango devices specified in this section.""" + return list(self.root.items()) diff --git a/mxcubecore/protocols_config.py b/mxcubecore/protocols_config.py new file mode 100644 index 0000000000..3c0d9c29e3 --- /dev/null +++ b/mxcubecore/protocols_config.py @@ -0,0 +1,145 @@ +""" +Provides an API to add Command and Channel objects to hardware objects, +as specified in it's YAML configuration file. + +See setup_commands_channels() function for details. +""" + +from __future__ import annotations + +from typing import ( + Callable, + Iterable, +) + +from mxcubecore.BaseHardwareObjects import HardwareObject + + +def _setup_tango_commands_channels(hwobj: HardwareObject, tango_config: dict): + """Set up Tango Command and Channel objects. 
+ + parameters: + tango: the 'tango' section of the hardware object's configuration + """ + from mxcubecore.model.protocols.tango import ( + Device, + TangoConfig, + ) + + def setup_tango_device(device_name: str, device_config: Device): + # + # set-up commands + # + for command_name, command_config in device_config.get_commands(): + attrs = dict(type="tango", name=command_name, tangoname=device_name) + hwobj.add_command(attrs, command_config.name) + + # + # set-up channels + # + for channel_name, channel_config in device_config.get_channels(): + attrs = dict(type="tango", name=channel_name, tangoname=device_name) + + if channel_config.polling_period: + attrs["polling"] = channel_config.polling_period + + if channel_config.timeout: + attrs["timeout"] = channel_config.timeout + + hwobj.add_channel(attrs, channel_config.attribute) + + tango_cfg = TangoConfig.model_validate(tango_config) + for device_name, device_config in tango_cfg.get_tango_devices(): + setup_tango_device(device_name, device_config) + + +def _setup_exporter_commands_channels(hwobj: HardwareObject, exporter_config: dict): + from mxcubecore.model.protocols.exporter import ( + Address, + ExporterConfig, + ) + + def setup_address(address: str, address_config: Address): + # + # set-up commands + # + for command_name, command_config in address_config.get_commands(): + attrs = dict(type="exporter", exporter_address=address, name=command_name) + hwobj.add_command(attrs, command_config.name) + + # + # set-up channels + # + for channel_name, channel_config in address_config.get_channels(): + attrs = dict(type="exporter", exporter_address=address, name=channel_name) + hwobj.add_channel(attrs, channel_config.attribute) + + exp_cfg = ExporterConfig.model_validate(exporter_config) + for address, address_config in exp_cfg.get_addresses(): + setup_address(address, address_config) + + +def _setup_epics_channels(hwobj: HardwareObject, epics_config: dict): + from mxcubecore.model.protocols.epics import ( + EpicsConfig, + Prefix, + ) + + def setup_prefix(prefix: str, prefix_config: Prefix): + # + # set-up channels + # + for channel_name, channel_config in prefix_config.get_channels(): + attrs = dict(type="epics", name=channel_name) + if channel_config.polling_period: + attrs["polling"] = channel_config.polling_period + + pv_name = f"{prefix}{channel_config.suffix}" + hwobj.add_channel(attrs, pv_name) + + epics_cfg = EpicsConfig.model_validate(epics_config) + for prefix, prefix_config in epics_cfg.get_prefixes(): + setup_prefix(prefix, prefix_config) + + +def _protocol_handles(): + return { + "tango": _setup_tango_commands_channels, + "exporter": _setup_exporter_commands_channels, + "epics": _setup_epics_channels, + } + + +def _get_protocol_names() -> Iterable[str]: + """Get names of all supported protocols.""" + return _protocol_handles().keys() + + +def _get_protocol_handler(protocol_name: str) -> Callable: + """Get the callable that will set up commands and channels for a specific protocol.""" + return _protocol_handles()[protocol_name] + + +def _setup_protocol(hwobj: HardwareObject, config: dict, protocol: str): + """Add the Command and Channel objects configured in the specified protocol section. 
+ + parameters: + protocol: name of the protocol to handle + """ + protocol_config = config.get(protocol) + if protocol_config is None: + # no configuration for this protocol + return + + _get_protocol_handler(protocol)(hwobj, protocol_config) + + +def setup_commands_channels(hwobj: HardwareObject, config: dict): + """Add the Command and Channel objects to a hardware object, as specified in the config. + + parameters: + hwobj: hardware object where to add Command and Channel objects + config: the complete hardware object configuration, i.e. parsed YAML file as dict + """ + for protocol in _get_protocol_names(): + _setup_protocol(hwobj, config, protocol) diff --git a/test/pytest/data/epics_channels.yaml b/test/pytest/data/epics_channels.yaml new file mode 100644 index 0000000000..dd8c3c2020 --- /dev/null +++ b/test/pytest/data/epics_channels.yaml @@ -0,0 +1,12 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: EpicTest +configuration: {} +epics: + "test:pre:fix:": + channels: + simple: + fancy: + suffix: "epics_suffix" + polling_period: 1234 diff --git a/test/pytest/data/exporter_channels.yaml b/test/pytest/data/exporter_channels.yaml new file mode 100644 index 0000000000..275c3c01c3 --- /dev/null +++ b/test/pytest/data/exporter_channels.yaml @@ -0,0 +1,11 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +exporter: + "kiwi.com:54321": + channels: + cairo: + luxor: + quena: diff --git a/test/pytest/data/exporter_commands.yaml b/test/pytest/data/exporter_commands.yaml new file mode 100644 index 0000000000..70aacc418f --- /dev/null +++ b/test/pytest/data/exporter_commands.yaml @@ -0,0 +1,12 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +exporter: + "example.com:4321": + commands: + Plain: + Fancy: + name: "spicy" + Third: diff --git a/test/pytest/data/exporter_commands_channels.yaml b/test/pytest/data/exporter_commands_channels.yaml new file mode 100644 index 0000000000..1cce2e7d2c --- /dev/null +++ b/test/pytest/data/exporter_commands_channels.yaml @@ -0,0 +1,19 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +exporter: + "example.com:8844": + commands: + # command with all default configs + plain: + # command with all supported configs + fancy: + name: "exporter_cmd" + channels: + # channel with all default configs + simple: + # channel with all supported configs + delux: + attribute: "exporter_attr" diff --git a/test/pytest/data/exporter_duo.yaml b/test/pytest/data/exporter_duo.yaml new file mode 100644 index 0000000000..197f0f7ab3 --- /dev/null +++ b/test/pytest/data/exporter_duo.yaml @@ -0,0 +1,20 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Duo +configuration: {} +exporter: + "example1.com:1111": + commands: + Ex1Plain: + Ex1Vanilla: + channels: + Ex1One: + Ex1Two: + "example2.com:2222": + commands: + Ex2Plain: + Ex2Vanilla: + channels: + Ex2One: + Ex2Two: diff --git a/test/pytest/data/no_commands_channels.yaml b/test/pytest/data/no_commands_channels.yaml new file mode 100644 index 0000000000..45f0637c5f --- /dev/null +++ b/test/pytest/data/no_commands_channels.yaml @@ -0,0 +1,4 @@ +%YAML 1.2 +--- +class: Dummy +configuration: {} diff --git a/test/pytest/data/tango_channels.yaml b/test/pytest/data/tango_channels.yaml new file mode 100644 index 0000000000..6ab78a0332 --- /dev/null +++ b/test/pytest/data/tango_channels.yaml @@ -0,0 +1,11 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} 
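+# a single tango device exposing three channels and no commands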
+tango: + my/channels/device: + channels: + red: + green: + cyan: diff --git a/test/pytest/data/tango_commands.yaml b/test/pytest/data/tango_commands.yaml new file mode 100644 index 0000000000..b1a509f506 --- /dev/null +++ b/test/pytest/data/tango_commands.yaml @@ -0,0 +1,11 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +tango: + my/commands/device: + commands: + Go: + Stop: + Abort: diff --git a/test/pytest/data/tango_commands_channels.yaml b/test/pytest/data/tango_commands_channels.yaml new file mode 100644 index 0000000000..898a4317c4 --- /dev/null +++ b/test/pytest/data/tango_commands_channels.yaml @@ -0,0 +1,21 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +tango: + my/test/device: + commands: + # command with all default configs + plain: + # command with all supported configs + fancy: + name: "tango_cmd" + channels: + # channel with all default configs + simple: + # channel with all supported configs + delux: + attribute: "tango_attr" + timeout: 123 + polling_period: 52 diff --git a/test/pytest/data/tango_duo.yaml b/test/pytest/data/tango_duo.yaml new file mode 100644 index 0000000000..5f8f8c9222 --- /dev/null +++ b/test/pytest/data/tango_duo.yaml @@ -0,0 +1,20 @@ +# yamllint disable rule:empty-values +%YAML 1.2 +--- +class: Dummy +configuration: {} +tango: + my/first/device: + commands: + firstDevCmd1: + firstDevCmd2: + channels: + simple: + cyan: + my/second/device: + commands: + secondDevCmd1: + secondDevCmd2: + channels: + red: + green: diff --git a/test/pytest/test_setup_commands_channels.py b/test/pytest/test_setup_commands_channels.py new file mode 100644 index 0000000000..82bb6aec86 --- /dev/null +++ b/test/pytest/test_setup_commands_channels.py @@ -0,0 +1,399 @@ +""" +Test mxcubecore.protocols_config.setup_commands_channels() function. + +Check that setting up command and channel objects using Tango, +exporter and EPICS works. 
+""" + +from dataclasses import dataclass +from pathlib import Path +from typing import Optional + +import mock +import pytest + +from mxcubecore.BaseHardwareObjects import HardwareObject +from mxcubecore.HardwareRepository import yaml as yaml_parser +from mxcubecore.protocols_config import setup_commands_channels + + +@dataclass +class _MockedAttribute: + name: str + + +class _MockedDeviceProxy: + def __init__(self, _name): + pass + + def ping(self): + pass + + def set_timeout_millis(self, _timeout): + pass + + def attribute_list_query(self): + return [ + _MockedAttribute("simple"), + _MockedAttribute("tango_attr"), + _MockedAttribute("red"), + _MockedAttribute("green"), + _MockedAttribute("cyan"), + ] + + +@dataclass +class _MockedEpicsCommand: + pv_name: str + arg_list = None + + def poll(self, *_, **__): + pass + + +class _TestHardwareObject(HardwareObject): + pass + + +def _get_data_path(filename: str) -> Path: + return Path(Path(__file__).parent, "data", filename) + + +def _parse_yaml_config(filename: str): + + with _get_data_path(filename).open() as f: + return yaml_parser.load(f) + + +@pytest.fixture +def test_hwo(): + return _TestHardwareObject("test") + + +@dataclass +class _TangoCh: + """describes expected tango channel object""" + + device_name: str + attribute_name: str + polling: Optional[int] = None + timeout: Optional[int] = None + + +@dataclass +class _TangoCmd: + """describes expected tango command object""" + + device_name: str + command: str + + +@dataclass +class _ExporterCh: + """describes expected exporter channel object""" + + attribute_name: str + + +@dataclass +class _ExporterCmd: + """describes expected exporter command object""" + + command: str + + +@dataclass +class _EpicsCh: + """describes expected EPICS channel object""" + + pv_name: str + polling: Optional[int] = None + + +def test_no_commands_channels(test_hwo): + """test loading a config that does not specify any command or channels""" + config = _parse_yaml_config("no_commands_channels.yaml") + setup_commands_channels(test_hwo, config) + + # there should be no channels nor commands setup + assert list(test_hwo.get_channels()) == [] + assert list(test_hwo.get_commands()) == [] + + +def _assert_tango_channels(channels, expected_channels): + channels = {channel.name(): channel for channel in channels} + + # check by name that we got all the expected channels + assert channels.keys() == expected_channels.keys() + + # check the details of each channel + for name, channel in channels.items(): + expected = expected_channels[name] + + assert channel.device_name == expected.device_name + assert channel.attribute_name == expected.attribute_name + assert channel.polling == expected.polling + if expected.timeout is not None: + assert channel.timeout == expected.timeout + + +def _assert_tango_commands(commands, expected_commands): + commands = {command.name(): command for command in commands} + + # check by name that we got all the expected channels + assert commands.keys() == expected_commands.keys() + + # check the details of each channel + for name, command in commands.items(): + expected = expected_commands[name] + assert command.device_name == expected.device_name + assert command.command == expected.command + + +def test_tango_commands_channels(test_hwo): + """test loading config with some tango commands and channels""" + dev_name = "my/test/device" + + config = _parse_yaml_config("tango_commands_channels.yaml") + + with mock.patch("mxcubecore.Command.Tango.DeviceProxy", _MockedDeviceProxy): + 
setup_commands_channels(test_hwo, config) + + expected_channels = { + "simple": _TangoCh(dev_name, "simple"), + "delux": _TangoCh(dev_name, "tango_attr", polling=52, timeout=123), + } + _assert_tango_channels(test_hwo.get_channels(), expected_channels) + + expected_commands = { + "plain": _TangoCmd(dev_name, "plain"), + "fancy": _TangoCmd(dev_name, "tango_cmd"), + } + _assert_tango_commands(test_hwo.get_commands(), expected_commands) + + +def test_tango_commands(test_hwo): + """test loading config with tango commands but no channels""" + + dev_name = "my/commands/device" + + config = _parse_yaml_config("tango_commands.yaml") + + with mock.patch("mxcubecore.Command.Tango.DeviceProxy", _MockedDeviceProxy): + setup_commands_channels(test_hwo, config) + + # there should be no channels + assert list(test_hwo.get_channels()) == [] + + expected_commands = { + "Go": _TangoCmd(dev_name, "Go"), + "Stop": _TangoCmd(dev_name, "Stop"), + "Abort": _TangoCmd(dev_name, "Abort"), + } + _assert_tango_commands(test_hwo.get_commands(), expected_commands) + + +def test_tango_channels(test_hwo): + """test loading config with tango channels but no commands""" + + dev_name = "my/channels/device" + + config = _parse_yaml_config("tango_channels.yaml") + + with mock.patch("mxcubecore.Command.Tango.DeviceProxy", _MockedDeviceProxy): + setup_commands_channels(test_hwo, config) + + # there should be no commands + assert list(test_hwo.get_commands()) == [] + + expected_channels = { + "red": _TangoCh(dev_name, "red"), + "green": _TangoCh(dev_name, "green"), + "cyan": _TangoCh(dev_name, "cyan"), + } + _assert_tango_channels( + test_hwo.get_channels(), + expected_channels, + ) + + +def test_tango_duo(test_hwo): + """test loading config with channels and commands from two tango devices""" + + first_dev = "my/first/device" + second_dev = "my/second/device" + + config = _parse_yaml_config("tango_duo.yaml") + + with mock.patch("mxcubecore.Command.Tango.DeviceProxy", _MockedDeviceProxy): + setup_commands_channels(test_hwo, config) + + expected_channels = { + "simple": _TangoCh(first_dev, "simple"), + "cyan": _TangoCh(first_dev, "cyan"), + "red": _TangoCh(second_dev, "red"), + "green": _TangoCh(second_dev, "green"), + } + _assert_tango_channels(test_hwo.get_channels(), expected_channels) + + expected_commands = { + "firstDevCmd1": _TangoCmd(first_dev, "firstDevCmd1"), + "firstDevCmd2": _TangoCmd(first_dev, "firstDevCmd2"), + "secondDevCmd1": _TangoCmd(second_dev, "secondDevCmd1"), + "secondDevCmd2": _TangoCmd(second_dev, "secondDevCmd2"), + } + _assert_tango_commands(test_hwo.get_commands(), expected_commands) + + +def _assert_exporter_channels(channels, expected_channels): + channels = {channel.name(): channel for channel in channels} + + # check by name that we got all the expected channels + assert channels.keys() == expected_channels.keys() + + # check the details of each channel + for name, channel in channels.items(): + expected = expected_channels[name] + assert channel.attribute_name == expected.attribute_name + + +def _assert_exporter_commands(commands, expected_commands): + commands = {command.name(): command for command in commands} + + # check by name that we got all the expected channels + assert commands.keys() == expected_commands.keys() + + # check the details of each channel + for name, command in commands.items(): + expected = expected_commands[name] + assert command.command == expected.command + + +def test_exporter(test_hwo): + """test loading config with some exporter commands and channels""" + config = 
_parse_yaml_config("exporter_commands_channels.yaml") + + with mock.patch("mxcubecore.Command.Exporter.start_exporter") as start_exporter: + setup_commands_channels(test_hwo, config) + + # check that we connected to correct exporter address + start_exporter.assert_called_with("example.com", 8844, mock.ANY) + + expected_channels = { + "simple": _ExporterCh("simple"), + "delux": _ExporterCh("exporter_attr"), + } + + _assert_exporter_channels(test_hwo.get_channels(), expected_channels) + + expected_commands = { + "plain": _ExporterCmd("plain"), + "fancy": _ExporterCmd("exporter_cmd"), + } + _assert_exporter_commands(test_hwo.get_commands(), expected_commands) + + +def test_exporter_commands(test_hwo): + """test loading config with exporter commands but no channels""" + + config = _parse_yaml_config("exporter_commands.yaml") + + with mock.patch("mxcubecore.Command.Exporter.start_exporter") as start_exporter: + setup_commands_channels(test_hwo, config) + + # check that we connected to correct exporter address + start_exporter.assert_called_with("example.com", 4321, mock.ANY) + + # there should be no channels + assert list(test_hwo.get_channels()) == [] + + expected_commands = { + "Plain": _ExporterCmd("Plain"), + "Fancy": _ExporterCmd("spicy"), + "Third": _ExporterCmd("Third"), + } + _assert_exporter_commands(test_hwo.get_commands(), expected_commands) + + +def test_exporter_channels(test_hwo): + """test loading config with exporter channels but no commands""" + config = _parse_yaml_config("exporter_channels.yaml") + + with mock.patch("mxcubecore.Command.Exporter.start_exporter") as start_exporter: + setup_commands_channels(test_hwo, config) + + # check that we connected to correct exporter address + start_exporter.assert_called_with("kiwi.com", 54321, mock.ANY) + + expected_channels = { + "cairo": _ExporterCh("cairo"), + "luxor": _ExporterCh("luxor"), + "quena": _ExporterCh("quena"), + } + _assert_exporter_channels(test_hwo.get_channels(), expected_channels) + + # there should be no commands + assert list(test_hwo.get_commands()) == [] + + +def test_exporter_duo(test_hwo): + """test loading config with channels and commands from two exporter addresses""" + config = _parse_yaml_config("exporter_duo.yaml") + + with mock.patch("mxcubecore.Command.Exporter.start_exporter") as start_exporter: + setup_commands_channels(test_hwo, config) + + # check that we connected to correct exporter address + start_exporter.assert_any_call("example1.com", 1111, mock.ANY) + start_exporter.assert_any_call("example2.com", 2222, mock.ANY) + + expected_channels = { + "Ex1One": _ExporterCh("Ex1One"), + "Ex1Two": _ExporterCh("Ex1Two"), + "Ex2One": _ExporterCh("Ex2One"), + "Ex2Two": _ExporterCh("Ex2Two"), + } + + _assert_exporter_channels(test_hwo.get_channels(), expected_channels) + + expected_commands = { + "Ex1Plain": _ExporterCmd("Ex1Plain"), + "Ex1Vanilla": _ExporterCmd("Ex1Vanilla"), + "Ex2Plain": _ExporterCmd("Ex2Plain"), + "Ex2Vanilla": _ExporterCmd("Ex2Vanilla"), + } + _assert_exporter_commands(test_hwo.get_commands(), expected_commands) + + +def _assert_epics_channels(channels, expected_channels): + channels = {channel.name(): channel for channel in channels} + + # check by name that we got all the expected channels + assert channels.keys() == expected_channels.keys() + + # check the details of each channel + for name, channel in channels.items(): + expected = expected_channels[name] + assert channel.polling == expected.polling + assert channel.command.pv_name == expected.pv_name + + +def test_epics(test_hwo): 
+ """test loading config with EPICS channels""" + + def _make_mocked_cmd(_, pv_name, *__, **___): + return _MockedEpicsCommand(pv_name) + + pv_prefix = "test:pre:fix:" + + config = _parse_yaml_config("epics_channels.yaml") + + with mock.patch("mxcubecore.Command.Epics.EpicsCommand", _make_mocked_cmd): + setup_commands_channels(test_hwo, config) + + expected_channels = { + "simple": _EpicsCh(f"{pv_prefix}simple", None), + "fancy": _EpicsCh(f"{pv_prefix}epics_suffix", 1234), + } + + _assert_epics_channels(test_hwo.get_channels(), expected_channels) From 76fb9ae0be27add530e6735321d9dfadb52693d5 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Wed, 16 Oct 2024 16:34:42 +0200 Subject: [PATCH 52/53] ISPyB client: handle boolean loginTranslate property correctly When using YAML config file for ISPyBClient hardware object, it feels logical to use YAML boolean type for 'loginTranslate' property. For example a following config file: class: ISPyBClient.ISPyBClient configuration: authServerType: ispyb loginTranslate: false This change fixes a bug where 'self.loginTranslate' would be assigned True value for the config file above. --- mxcubecore/HardwareObjects/ISPyBClient.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mxcubecore/HardwareObjects/ISPyBClient.py b/mxcubecore/HardwareObjects/ISPyBClient.py index c05768266e..fd64d04cf4 100644 --- a/mxcubecore/HardwareObjects/ISPyBClient.py +++ b/mxcubecore/HardwareObjects/ISPyBClient.py @@ -199,7 +199,7 @@ def init(self): if self.ldapConnection is None: logging.getLogger("HWR").debug("LDAP Server is not available") - self.loginTranslate = self.get_property("loginTranslate") or True + self.loginTranslate = self.get_property("loginTranslate", True) self.beamline_name = HWR.beamline.session.beamline_name self.ws_root = self.get_property("ws_root") From 02a5b9739c63425e2dd33fa181162ef8800a2635 Mon Sep 17 00:00:00 2001 From: Elmir Jagudin Date: Mon, 21 Oct 2024 16:24:52 +0200 Subject: [PATCH 53/53] remove GenericDiffractometer's 'zoom' proxy attribute The 'zoom' attribute is now automagically set from the 'objects' settings in YAML/XML configure file. --- mxcubecore/HardwareObjects/GenericDiffractometer.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/mxcubecore/HardwareObjects/GenericDiffractometer.py b/mxcubecore/HardwareObjects/GenericDiffractometer.py index 4e5f970052..c8dde69a5b 100755 --- a/mxcubecore/HardwareObjects/GenericDiffractometer.py +++ b/mxcubecore/HardwareObjects/GenericDiffractometer.py @@ -590,18 +590,6 @@ def alignment_z(self): """ return self.motor_hwobj_dict.get("phiz") - @property - def zoom(self): - """zoom motor object - - NBNB HACK TODO - ocnfigure this in graphics object - (which now calls this property) - - Returns: - AbstractActuator - """ - return self.motor_hwobj_dict.get("zoom") - def is_ready(self): """ Detects if device is ready