diff --git a/aiida/cmdline/commands/cmd_profile.py b/aiida/cmdline/commands/cmd_profile.py
index df7fc2331b..db2d017ab0 100644
--- a/aiida/cmdline/commands/cmd_profile.py
+++ b/aiida/cmdline/commands/cmd_profile.py
@@ -11,10 +11,12 @@
 import click
 
 from aiida.cmdline.commands.cmd_verdi import verdi
+from aiida.cmdline.groups import DynamicEntryPointCommandGroup
 from aiida.cmdline.params import arguments, options
+from aiida.cmdline.params.options.commands import setup
 from aiida.cmdline.utils import defaults, echo
 from aiida.common import exceptions
-from aiida.manage.configuration import get_config
+from aiida.manage.configuration import Profile, create_profile, get_config
 
 
 @verdi.group('profile')
@@ -22,6 +24,41 @@ def verdi_profile():
     """Inspect and manage the configured profiles."""
 
 
+def command_create_profile(ctx: click.Context, storage_cls, non_interactive: bool, profile: Profile, **kwargs):  # pylint: disable=unused-argument
+    """Create a new profile, initialise its storage and create a default user.
+
+    :param ctx: The context of the CLI command.
+    :param storage_cls: The storage class obtained through loading the entry point from the ``aiida.storage`` group.
+    :param non_interactive: Whether the command was invoked non-interactively.
+    :param profile: The profile instance. This is an empty ``Profile`` instance created by the command line argument
+        which currently only contains the selected profile name for the profile that is to be created.
+    :param kwargs: Arguments to initialise the instance of the selected storage implementation.
+    """
+    try:
+        profile = create_profile(ctx.obj.config, storage_cls, name=profile.name, **kwargs)
+    except (ValueError, TypeError, exceptions.EntryPointError, exceptions.StorageMigrationError) as exception:
+        echo.echo_critical(str(exception))
+
+    echo.echo_success(f'Created new profile `{profile.name}`.')
+
+
+@verdi_profile.group(
+    'setup',
+    cls=DynamicEntryPointCommandGroup,
+    command=command_create_profile,
+    entry_point_group='aiida.storage',
+    shared_options=[
+        setup.SETUP_PROFILE(),
+        setup.SETUP_USER_EMAIL(),
+        setup.SETUP_USER_FIRST_NAME(),
+        setup.SETUP_USER_LAST_NAME(),
+        setup.SETUP_USER_INSTITUTION(),
+    ]
+)
+def profile_setup():
+    """Set up a new profile."""
+
+
 @verdi_profile.command('list')
 def profile_list():
     """Display a list of all available profiles."""
diff --git a/aiida/cmdline/groups/dynamic.py b/aiida/cmdline/groups/dynamic.py
index 89643ef514..a32b9aa847 100644
--- a/aiida/cmdline/groups/dynamic.py
+++ b/aiida/cmdline/groups/dynamic.py
@@ -121,10 +121,30 @@ def list_options(self, entry_point: str) -> list:
 
         :param entry_point: The entry point.
         """
-        return [
-            self.create_option(*item)
-            for item in self.factory(entry_point).get_cli_options().items()  # type: ignore[union-attr]
-        ]
+        cls = self.factory(entry_point)
+
+        if not hasattr(cls, 'Configuration'):
+            from aiida.common.warnings import warn_deprecation
+            warn_deprecation(
+                'Relying on `_get_cli_options` is deprecated. The options should be defined through a '
+                '`pydantic.BaseModel` that should be assigned to the `Configuration` class attribute.',
+                version=3
+            )
+            options_spec = self.factory(entry_point).get_cli_options()  # type: ignore[union-attr]
+        else:
+            options_spec = {}
+
+            for key, field_info in cls.Configuration.model_fields.items():
+                options_spec[key] = {
+                    'required': field_info.is_required(),
+                    'type': field_info.annotation,
+                    'prompt': field_info.title,
+                    'default': field_info.default if field_info.default is not None else None,
+                    'help': field_info.description,
+                }
+
+        return [self.create_option(*item) for item in options_spec.items()]
 
     @staticmethod
     def create_option(name, spec: dict) -> t.Callable[[t.Any], t.Any]:
@@ -136,6 +156,7 @@ def create_option(name, spec: dict) -> t.Callable[[t.Any], t.Any]:
         name_dashed = name.replace('_', '-')
         option_name = f'--{name_dashed}/--no-{name_dashed}' if is_flag else f'--{name_dashed}'
         option_short_name = spec.pop('short_name', None)
+        option_names = (option_short_name, option_name) if option_short_name else (option_name,)
 
         kwargs = {'cls': spec.pop('cls', InteractiveOption), 'show_default': True, 'is_flag': is_flag, **spec}
 
@@ -144,9 +165,4 @@ def create_option(name, spec: dict) -> t.Callable[[t.Any], t.Any]:
         if kwargs['cls'] is InteractiveOption and is_flag and default is None:
             kwargs['cls'] = functools.partial(InteractiveOption, prompt_fn=lambda ctx: False)
 
-        if option_short_name:
-            option = click.option(option_short_name, option_name, **kwargs)
-        else:
-            option = click.option(option_name, **kwargs)
-
-        return option
+        return click.option(*option_names, **kwargs)
diff --git a/aiida/manage/configuration/__init__.py b/aiida/manage/configuration/__init__.py
index fe92492bc1..4a5873c88e 100644
--- a/aiida/manage/configuration/__init__.py
+++ b/aiida/manage/configuration/__init__.py
@@ -184,6 +184,43 @@ def profile_context(profile: Optional[str] = None, allow_switch=False) -> 'Profi
         manager.load_profile(current_profile, allow_switch=True)
 
 
+def create_profile(
+    config: Config,
+    storage_cls,
+    *,
+    name: str,
+    email: str,
+    first_name: Optional[str] = None,
+    last_name: Optional[str] = None,
+    institution: Optional[str] = None,
+    **kwargs
+) -> Profile:
+    """Create a new profile, initialise its storage and create a default user.
+
+    :param config: The config instance.
+    :param storage_cls: The storage class obtained through loading the entry point from the ``aiida.storage`` group.
+    :param name: Name of the profile.
+    :param email: Email for the default user.
+    :param first_name: First name for the default user.
+    :param last_name: Last name for the default user.
+    :param institution: Institution for the default user.
+    :param kwargs: Arguments to initialise the instance of the selected storage implementation.
+    """
+    from aiida.orm import User
+
+    storage_config = storage_cls.Configuration(**{k: v for k, v in kwargs.items() if v is not None}).dict()
+    profile: Profile = config.create_profile(name=name, storage_cls=storage_cls, storage_config=storage_config)
+
+    with profile_context(profile.name, allow_switch=True):
+        user = User(email=email, first_name=first_name, last_name=last_name, institution=institution).store()
+        profile.default_user_email = user.email
+
+    config.update_profile(profile)
+    config.store()
+
+    return profile
+
+
 def reset_config():
     """Reset the globally loaded config.
diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py
index d88f81f30c..89014670bb 100644
--- a/aiida/manage/configuration/config.py
+++ b/aiida/manage/configuration/config.py
@@ -16,9 +16,11 @@
 from __future__ import annotations
 
 import codecs
+import contextlib
+import io
 import json
 import os
-from typing import Any, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type
 import uuid
 
 from pydantic import (  # pylint: disable=no-name-in-module
@@ -30,14 +32,19 @@
     field_validator,
 )
 
-from aiida.common.exceptions import ConfigurationError
-from aiida.common.log import LogLevels
+from aiida.common.exceptions import ConfigurationError, EntryPointError, StorageMigrationError
+from aiida.common.log import AIIDA_LOGGER, LogLevels
 
 from .options import Option, get_option, get_option_names, parse_option
 from .profile import Profile
 
 __all__ = ('Config',)
 
+if TYPE_CHECKING:
+    from aiida.orm.implementation.storage_backend import StorageBackend
+
+LOGGER = AIIDA_LOGGER.getChild(__file__)
+
 
 class ConfigVersionSchema(BaseModel, defer_build=True):
     """Schema for the version configuration of an AiiDA instance."""
@@ -126,7 +133,6 @@ def validate_caching_identifier_pattern(cls, value: List[str]) -> List[str]:
         from aiida.manage.caching import _validate_identifier_pattern
         for identifier in value:
             _validate_identifier_pattern(identifier=identifier)
-
         return value
 
@@ -446,6 +452,70 @@ def get_profile(self, name: Optional[str] = None) -> Profile:
 
         return self._profiles[name]
 
+    def create_profile(self, name: str, storage_cls: Type['StorageBackend'], storage_config: dict[str, str]) -> Profile:
+        """Create a new profile and initialise its storage.
+
+        :param name: The profile name.
+        :param storage_cls: The :class:`aiida.orm.implementation.storage_backend.StorageBackend` implementation to use.
+        :param storage_config: The configuration necessary to initialise and connect to the storage backend.
+        :returns: The created profile.
+        :raises ValueError: If the profile already exists.
+        :raises TypeError: If the ``storage_cls`` is not a subclass of
+            :class:`aiida.orm.implementation.storage_backend.StorageBackend`.
+        :raises EntryPointError: If the ``storage_cls`` does not have an associated entry point.
+        :raises StorageMigrationError: If the storage cannot be initialised.
+        """
+        from aiida.orm.implementation.storage_backend import StorageBackend
+        from aiida.plugins.entry_point import get_entry_point_from_class
+
+        if name in self.profile_names:
+            raise ValueError(f'The profile `{name}` already exists.')
+
+        if not issubclass(storage_cls, StorageBackend):
+            raise TypeError(
+                f'The `storage_cls={storage_cls}` is not subclass of '
+                '`aiida.orm.implementation.storage_backend.StorageBackend`.'
+            )
+
+        _, storage_entry_point = get_entry_point_from_class(storage_cls.__module__, storage_cls.__name__)
+
+        if storage_entry_point is None:
+            raise EntryPointError(f'`{storage_cls}` does not have a registered entry point.')
+
+        profile = Profile(
+            name, {
+                'storage': {
+                    'backend': storage_entry_point.name,
+                    'config': storage_config,
+                },
+                'process_control': {
+                    'backend': 'rabbitmq',
+                    'config': {
+                        'broker_protocol': 'amqp',
+                        'broker_username': 'guest',
+                        'broker_password': 'guest',
+                        'broker_host': '127.0.0.1',
+                        'broker_port': 5672,
+                        'broker_virtual_host': ''
+                    }
+                },
+            }
+        )
+
+        LOGGER.report('Initialising the storage backend.')
+        try:
+            with contextlib.redirect_stdout(io.StringIO()):
+                profile.storage_cls.initialise(profile)
+        except Exception as exception:  # pylint: disable=broad-except
+            raise StorageMigrationError(
+                f'Storage backend initialisation failed, probably because the configuration is incorrect:\n{exception}'
+            ) from exception
+        LOGGER.report('Storage initialisation completed.')
+
+        self.add_profile(profile)
+        self.store()
+
+        return profile
+
     def add_profile(self, profile):
         """Add a profile to the configuration.
diff --git a/aiida/orm/implementation/storage_backend.py b/aiida/orm/implementation/storage_backend.py
index bc99001ffb..54f2b6b803 100644
--- a/aiida/orm/implementation/storage_backend.py
+++ b/aiida/orm/implementation/storage_backend.py
@@ -8,6 +8,8 @@
 # For further information please visit http://www.aiida.net               #
 ###########################################################################
 """Generic backend related objects"""
+from __future__ import annotations
+
 import abc
 from typing import TYPE_CHECKING, Any, ContextManager, List, Optional, Sequence, TypeVar, Union
diff --git a/aiida/repository/backend/sandbox.py b/aiida/repository/backend/sandbox.py
index ced40bb914..0c16c9ff34 100644
--- a/aiida/repository/backend/sandbox.py
+++ b/aiida/repository/backend/sandbox.py
@@ -4,6 +4,7 @@
 import contextlib
 import os
+import pathlib
 import shutil
 import typing as t
 import uuid
@@ -65,7 +66,7 @@ def is_initialised(self) -> bool:
     def sandbox(self):
         """Return the sandbox instance of this repository."""
         if self._sandbox is None:
-            self._sandbox = SandboxFolder(filepath=self._filepath)
+            self._sandbox = SandboxFolder(filepath=pathlib.Path(self._filepath) if self._filepath is not None else None)
 
         return self._sandbox
diff --git a/aiida/storage/psql_dos/backend.py b/aiida/storage/psql_dos/backend.py
index c425fcfe7e..0ee0a8c251 100644
--- a/aiida/storage/psql_dos/backend.py
+++ b/aiida/storage/psql_dos/backend.py
@@ -15,6 +15,7 @@
 import pathlib
 from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Set, Union
 
+from pydantic import BaseModel, Field
 from sqlalchemy import column, insert, update
 from sqlalchemy.orm import Session, scoped_session, sessionmaker
 
@@ -72,6 +73,30 @@ class PsqlDosBackend(StorageBackend):  # pylint: disable=too-many-public-methods
 
     The `django` backend was removed, to consolidate access to this storage.
     """
 
+    class Configuration(BaseModel):
+        """Model describing required information to configure an instance of the storage."""
+
+        database_engine: str = Field(
+            title='PostgreSQL engine',
+            description='The engine to use to connect to the database.',
+            default='postgresql_psycopg2'
+        )
+        database_hostname: str = Field(
+            title='PostgreSQL hostname', description='The hostname of the PostgreSQL server.', default='localhost'
+        )
+        database_port: int = Field(
+            title='PostgreSQL port', description='The port of the PostgreSQL server.', default=5432
+        )
+        database_username: str = Field(
+            title='PostgreSQL username', description='The username with which to connect to the PostgreSQL server.'
+        )
+        database_password: str = Field(
+            title='PostgreSQL password', description='The password with which to connect to the PostgreSQL server.'
+        )
+        database_name: Union[str, None] = Field(
+            title='PostgreSQL database name', description='The name of the database in the PostgreSQL server.'
+        )
+
     migrator = PsqlDosMigrator
 
     @classmethod
diff --git a/aiida/storage/sqlite_temp/backend.py b/aiida/storage/sqlite_temp/backend.py
index c25a9d7777..14a62b3cfb 100644
--- a/aiida/storage/sqlite_temp/backend.py
+++ b/aiida/storage/sqlite_temp/backend.py
@@ -12,18 +12,19 @@
 from contextlib import contextmanager, nullcontext
 import functools
-from functools import cached_property
 import hashlib
 import os
 from pathlib import Path
 import shutil
+from tempfile import mkdtemp
 from typing import Any, BinaryIO, Iterator, Sequence
 
+from pydantic import BaseModel, Field
 from sqlalchemy import column, insert, update
 from sqlalchemy.orm import Session
 
 from aiida.common.exceptions import ClosedStorage, IntegrityError
-from aiida.manage import Profile
+from aiida.manage.configuration import Profile
 from aiida.orm.entities import EntityTypes
 from aiida.orm.implementation import BackendEntity, StorageBackend
 from aiida.repository.backend.sandbox import SandboxRepositoryBackend
@@ -41,6 +42,15 @@ class SqliteTempBackend(StorageBackend):  # pylint: disable=too-many-public-meth
 
     Whenever it is instantiated, it creates a fresh storage backend, and destroys it when it is garbage collected.
""" + + class Configuration(BaseModel): + + filepath: str = Field( + title='Temporary directory', + description='Temporary directory in which to store data for this backend.', + default_factory=mkdtemp + ) + _read_only = False @staticmethod @@ -49,7 +59,7 @@ def create_profile( default_user_email='user@email.com', options: dict | None = None, debug: bool = False, - repo_path: str | Path | None = None, + filepath: str | Path | None = None, ) -> Profile: """Create a new profile instance for this backend, from the path to the zip file.""" return Profile( @@ -58,8 +68,8 @@ def create_profile( 'storage': { 'backend': 'core.sqlite_temp', 'config': { + 'filepath': filepath, 'debug': debug, - 'repo_path': repo_path, } }, 'process_control': { @@ -89,7 +99,7 @@ def migrate(cls, profile: Profile): def __init__(self, profile: Profile): super().__init__(profile) self._session: Session | None = None - self._repo: SandboxShaRepositoryBackend | None = None + self._repo: SandboxShaRepositoryBackend = SandboxShaRepositoryBackend(profile.storage_config['filepath']) self._globals: dict[str, tuple[Any, str | None]] = {} self._closed = False self.get_session() # load the database on initialization @@ -135,10 +145,6 @@ def get_session(self) -> Session: def get_repository(self) -> SandboxShaRepositoryBackend: if self._closed: raise ClosedStorage(str(self)) - if self._repo is None: - # to-do this does not seem to be removing the folder on garbage collection? - repo_path = self.profile.storage_config.get('repo_path') - self._repo = SandboxShaRepositoryBackend(filepath=Path(repo_path) if repo_path else None) return self._repo @property @@ -175,31 +181,31 @@ def get_backend_entity(self, model) -> BackendEntity: """Return the backend entity that corresponds to the given Model instance.""" return orm.get_backend_entity(model, self) - @cached_property + @functools.cached_property def authinfos(self): return orm.SqliteAuthInfoCollection(self) - @cached_property + @functools.cached_property def comments(self): return orm.SqliteCommentCollection(self) - @cached_property + @functools.cached_property def computers(self): return orm.SqliteComputerCollection(self) - @cached_property + @functools.cached_property def groups(self): return orm.SqliteGroupCollection(self) - @cached_property + @functools.cached_property def logs(self): return orm.SqliteLogCollection(self) - @cached_property + @functools.cached_property def nodes(self): return orm.SqliteNodeCollection(self) - @cached_property + @functools.cached_property def users(self): return orm.SqliteUserCollection(self) diff --git a/aiida/storage/sqlite_zip/backend.py b/aiida/storage/sqlite_zip/backend.py index dd58451bf1..0c34a2405d 100644 --- a/aiida/storage/sqlite_zip/backend.py +++ b/aiida/storage/sqlite_zip/backend.py @@ -21,6 +21,7 @@ from zipfile import ZipFile, is_zipfile from archive_path import ZipPath, extract_file_in_zip +from pydantic import BaseModel, Field from sqlalchemy.orm import Session from aiida import __version__ @@ -63,6 +64,14 @@ class SqliteZipBackend(StorageBackend): # pylint: disable=too-many-public-metho ... """ + + class Configuration(BaseModel): + + filepath: str = Field( + title='Filepath of the archive', + description='Filepath of the archive in which to store data for this backend.' 
+ ) + _read_only = True @classmethod @@ -427,5 +436,5 @@ def list_objects(self) -> Iterable[str]: def open(self, key: str) -> Iterator[BinaryIO]: if not self._path.joinpath(key).is_file(): raise FileNotFoundError(f'object with key `{key}` does not exist.') - with self._path.joinpath(key).open('rb') as handle: + with self._path.joinpath(key).open('rb', encoding='utf-8') as handle: yield handle diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index f5a41225e7..e7ceeee3e7 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -99,6 +99,9 @@ py:obj aiida.storage.psql_dos.orm.ModelType py:obj aiida.storage.psql_dos.orm.SelfType py:obj aiida.storage.psql_dos.orm.entities.ModelType py:obj aiida.storage.psql_dos.orm.entities.SelfType +py:class aiida.storage.psql_dos.backend.Configuration +py:class aiida.storage.sqlite_temp.backend.Configuration +py:class aiida.storage.sqlite_zip.backend.Configuration py:obj aiida.tools.archive.SelfType py:obj aiida.tools.archive.EntityType py:func QueryBuilder._get_ormclass @@ -132,6 +135,8 @@ py:func click.shell_completion._start_of_option py:meth click.Option.get_default py:meth fail +py:class pydantic.main.BaseModel + py:class requests.models.Response py:class requests.Response diff --git a/docs/source/reference/command_line.rst b/docs/source/reference/command_line.rst index c3ae6e2f7c..df68843a24 100644 --- a/docs/source/reference/command_line.rst +++ b/docs/source/reference/command_line.rst @@ -356,6 +356,7 @@ Below is a list with all available subcommands. delete Delete one or more profiles. list Display a list of all available profiles. setdefault Set a profile as the default one. + setup Set up a new profile. show Show details for a profile. diff --git a/pyproject.toml b/pyproject.toml index 3ead519b85..0eaa9195b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -291,6 +291,7 @@ disable = [ "use-dict-literal", "unnecessary-dunder-call", ] +extension-pkg-whitelist = "pydantic" [tool.pylint.basic] good-names = [ diff --git a/tests/cmdline/commands/test_profile.py b/tests/cmdline/commands/test_profile.py index 7b2d6f20b5..2cd8201d6e 100644 --- a/tests/cmdline/commands/test_profile.py +++ b/tests/cmdline/commands/test_profile.py @@ -131,3 +131,12 @@ def test_delete(run_cli_command, mock_profiles, pg_test_cluster): result = run_cli_command(cmd_profile.profile_list, use_subprocess=False) assert profile_list[2] not in result.output assert profile_list[3] not in result.output + + +def test_setup(run_cli_command, isolated_config, tmp_path): + """Test the ``verdi profile setup`` command.""" + profile_name = 'temp-profile' + options = ['core.sqlite_temp', '-n', '--filepath', str(tmp_path), '--profile', profile_name] + result = run_cli_command(cmd_profile.profile_setup, options, use_subprocess=False) + assert f'Created new profile `{profile_name}`.' 
+    assert profile_name in isolated_config.profile_names
diff --git a/tests/manage/configuration/test_config.py b/tests/manage/configuration/test_config.py
index 613a346617..35786ff683 100644
--- a/tests/manage/configuration/test_config.py
+++ b/tests/manage/configuration/test_config.py
@@ -11,6 +11,7 @@
 import json
 import os
 import pathlib
+import uuid
 
 import pytest
 
@@ -18,6 +19,8 @@
 from aiida.manage.configuration import Config, Profile, settings
 from aiida.manage.configuration.migrations import CURRENT_CONFIG_VERSION, OLDEST_COMPATIBLE_CONFIG_VERSION
 from aiida.manage.configuration.options import get_option
+from aiida.orm.implementation.storage_backend import StorageBackend
+from aiida.storage.sqlite_temp import SqliteTempBackend
 
 
 @pytest.fixture
@@ -418,3 +421,38 @@ def test_delete_profile(config_with_profile, profile_factory):
     # Now reload the config from disk to make sure the changes after deletion were persisted to disk
     config_on_disk = Config.from_file(config.filepath)
     assert profile_name not in config_on_disk.profile_names
+
+
+def test_create_profile_raises(config_with_profile, monkeypatch):
+    """Test the ``create_profile`` method when it raises."""
+    config = config_with_profile
+    profile_name = uuid.uuid4().hex
+
+    def raise_storage_migration_error(*args, **kwargs):
+        raise exceptions.StorageMigrationError()
+
+    monkeypatch.setattr(SqliteTempBackend, 'initialise', raise_storage_migration_error)
+
+    class UnregisteredStorageBackend(StorageBackend):
+        pass
+
+    with pytest.raises(ValueError, match=r'The profile `.*` already exists.'):
+        config.create_profile(config_with_profile.default_profile_name, SqliteTempBackend, {})
+
+    with pytest.raises(TypeError, match=r'The `storage_cls=.*` is not subclass of `.*`.'):
+        config.create_profile(profile_name, object, {})
+
+    with pytest.raises(exceptions.EntryPointError, match=r'.*does not have a registered entry point.'):
+        config.create_profile(profile_name, UnregisteredStorageBackend, {})
+
+    with pytest.raises(exceptions.StorageMigrationError, match='Storage backend initialisation failed.*'):
+        config.create_profile(profile_name, SqliteTempBackend, {})
+
+
+def test_create_profile(config_with_profile):
+    """Test the ``create_profile`` method."""
+    config = config_with_profile
+    profile_name = uuid.uuid4().hex
+
+    config.create_profile(profile_name, SqliteTempBackend, {})
+    assert profile_name in config.profile_names
diff --git a/tests/manage/configuration/test_configuration.py b/tests/manage/configuration/test_configuration.py
index de9fd3a417..e363bd9a60 100644
--- a/tests/manage/configuration/test_configuration.py
+++ b/tests/manage/configuration/test_configuration.py
@@ -3,8 +3,19 @@
 import pytest
 
 import aiida
-from aiida.manage.configuration import get_profile, profile_context
+from aiida.manage.configuration import Profile, create_profile, get_profile, profile_context
 from aiida.manage.manager import get_manager
+from aiida.storage.sqlite_temp.backend import SqliteTempBackend
+
+
+def test_create_profile(isolated_config, tmp_path):
+    """Test :func:`aiida.manage.configuration.create_profile`."""
+    profile_name = 'testing'
+    profile = create_profile(
+        isolated_config, SqliteTempBackend, name=profile_name, email='test@localhost', filepath=str(tmp_path)
+    )
+    assert isinstance(profile, Profile)
+    assert profile_name in isolated_config.profile_names
 
 
 def test_check_version_release(monkeypatch, capsys, isolated_config):
diff --git a/tests/storage/sqlite/test_archive.py b/tests/storage/sqlite/test_archive.py
index b370b67966..6169876c08 100644
--- a/tests/storage/sqlite/test_archive.py
+++ b/tests/storage/sqlite/test_archive.py
@@ -12,7 +12,7 @@ def test_basic(tmp_path):
     filename = Path(tmp_path / 'export.aiida')
 
     # generate a temporary backend
-    profile1 = SqliteTempBackend.create_profile(repo_path=str(tmp_path / 'repo1'))
+    profile1 = SqliteTempBackend.create_profile(filepath=str(tmp_path / 'repo1'))
     backend1 = SqliteTempBackend(profile1)
 
     # add simple node
@@ -30,7 +30,7 @@ def test_basic(tmp_path):
     create_archive(None, backend=backend1, filename=filename)
 
     # create a new temporary backend and import
-    profile2 = SqliteTempBackend.create_profile(repo_path=str(tmp_path / 'repo2'))
+    profile2 = SqliteTempBackend.create_profile(filepath=str(tmp_path / 'repo2'))
     backend2 = SqliteTempBackend(profile2)
     import_archive(filename, backend=backend2)
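
Usage example (editor's note, not part of the patch): the snippet below is a minimal sketch of how the new ``create_profile`` API and the ``verdi profile setup`` command introduced above would be invoked, following the added tests. The profile name, email and filepath are placeholder values.

    # Programmatic profile creation, mirroring tests/manage/configuration/test_configuration.py.
    from aiida.manage.configuration import create_profile, get_config
    from aiida.storage.sqlite_temp.backend import SqliteTempBackend

    config = get_config()
    profile = create_profile(
        config,
        SqliteTempBackend,
        name='demo-profile',        # placeholder profile name
        email='user@localhost',     # email of the default user created for the profile
        filepath='/tmp/demo-repo',  # forwarded to SqliteTempBackend.Configuration via **kwargs
    )
    print(profile.name)

    # CLI equivalent, mirroring tests/cmdline/commands/test_profile.py:
    #     verdi profile setup core.sqlite_temp -n --profile demo-profile --filepath /tmp/demo-repo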
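
How a storage plugin's options are derived (editor's note, illustrative sketch): with this patch, ``DynamicEntryPointCommandGroup`` reads ``Configuration.model_fields`` and turns each field into a ``--<field-name>`` option, using the field title as the interactive prompt, the description as the help text, and the presence of a default to decide whether the option is required. The model below is for a hypothetical plugin; the field names are assumptions, not part of the patch.

    from pydantic import BaseModel, Field

    class Configuration(BaseModel):
        """Hypothetical storage plugin configuration; field names are illustrative only."""

        hostname: str = Field(
            title='Server hostname',                            # becomes the prompt of `--hostname`
            description='The hostname of the server.',          # becomes the `--help` text
            default='localhost',                                # has a default, so the option is not required
        )
        archive_path: str = Field(
            title='Archive path',
            description='Where the backend stores its data.',   # no default, so the option is required
        )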