Skip to content

Zen Stores

zenml.zen_stores special

ZenStores define ways to store ZenML relevant data locally or remotely.

base_zen_store

Base Zen Store implementation.

BaseZenStore (BaseModel, ZenStoreInterface, SecretsStoreInterface, AnalyticsTrackerMixin, ABC) pydantic-model

Base class for accessing and persisting ZenML core objects.

Attributes:

Name Type Description
config StoreConfiguration

The configuration of the store.

track_analytics bool

Only send analytics if set to True.

secrets_store

The secrets store to use for storing sensitive data.

Source code in zenml/zen_stores/base_zen_store.py
class BaseZenStore(
    BaseModel,
    ZenStoreInterface,
    SecretsStoreInterface,
    AnalyticsTrackerMixin,
    ABC,
):
    """Base class for accessing and persisting ZenML core objects.

    Attributes:
        config: The configuration of the store.
        track_analytics: Only send analytics if set to `True`.
        secrets_store: The secrets store to use for storing sensitive data.
    """

    config: StoreConfiguration
    track_analytics: bool = True
    # Optional secrets store attached by `create_store`; kept private
    # (leading underscore) so it is mutable and excluded from serialization.
    _secrets_store: Optional[BaseSecretsStore] = None
    # Registry of external event handlers, keyed by store event.
    _event_handlers: Dict[StoreEvent, List[Callable[..., Any]]] = {}

    # Concrete subclasses must set these to their store type and the
    # matching configuration class.
    TYPE: ClassVar[StoreType]
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]]

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    def __init__(
        self,
        skip_default_registrations: bool = False,
        **kwargs: Any,
    ) -> None:
        """Create and initialize a store.

        Args:
            skip_default_registrations: If `True`, the creation of the default
                stack and user in the store will be skipped.
            **kwargs: Additional keyword arguments to pass to the Pydantic
                constructor.

        Raises:
            RuntimeError: If the store cannot be initialized.
        """
        # Let Pydantic validate and assign the configured fields first.
        super().__init__(**kwargs)

        try:
            self._initialize()
        except Exception as e:
            # Wrap any backend-specific failure in a single RuntimeError that
            # identifies the store type and URL, preserving the original
            # exception as the cause.
            raise RuntimeError(
                f"Error initializing {self.type.value} store with URL "
                f"'{self.url}': {str(e)}"
            ) from e

        if not skip_default_registrations:
            logger.debug("Initializing database")
            self._initialize_database()
        else:
            logger.debug("Skipping database initialization")

    @staticmethod
    def get_store_class(store_type: StoreType) -> Type["BaseZenStore"]:
        """Returns the class of the given store type.

        Args:
            store_type: The type of the store to get the class for.

        Returns:
            The class of the given store type.

        Raises:
            TypeError: If the store type is unsupported.
        """
        # Store classes are imported lazily to avoid pulling in heavy
        # backend dependencies unless they are actually needed.
        if store_type == StoreType.SQL:
            from zenml.zen_stores.sql_zen_store import SqlZenStore

            return SqlZenStore
        elif store_type == StoreType.REST:
            from zenml.zen_stores.rest_zen_store import RestZenStore

            return RestZenStore
        else:
            raise TypeError(
                f"No store implementation found for store type "
                f"`{store_type.value}`."
            )

    @staticmethod
    def get_store_config_class(
        store_type: StoreType,
    ) -> Type["StoreConfiguration"]:
        """Returns the store config class of the given store type.

        Args:
            store_type: The type of the store to get the class for.

        Returns:
            The config class of the given store type.
        """
        store_class = BaseZenStore.get_store_class(store_type)
        return store_class.CONFIG_TYPE

    @staticmethod
    def get_store_type(url: str) -> StoreType:
        """Returns the store type associated with a URL schema.

        Args:
            url: The store URL.

        Returns:
            The store type associated with the supplied URL schema.

        Raises:
            TypeError: If no store type was found to support the supplied URL.
        """
        from zenml.zen_stores.rest_zen_store import RestZenStoreConfiguration
        from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

        if SqlZenStoreConfiguration.supports_url_scheme(url):
            return StoreType.SQL
        elif RestZenStoreConfiguration.supports_url_scheme(url):
            return StoreType.REST
        else:
            raise TypeError(f"No store implementation found for URL: {url}.")

    @staticmethod
    def create_store(
        config: StoreConfiguration,
        skip_default_registrations: bool = False,
        **kwargs: Any,
    ) -> "BaseZenStore":
        """Create and initialize a store from a store configuration.

        Args:
            config: The store configuration to use.
            skip_default_registrations: If `True`, the creation of the default
                stack and user in the store will be skipped.
            **kwargs: Additional keyword arguments to pass to the store class

        Returns:
            The initialized store.
        """
        logger.debug(f"Creating store with config '{config}'...")
        store_class = BaseZenStore.get_store_class(config.type)
        store = store_class(
            config=config,
            skip_default_registrations=skip_default_registrations,
            **kwargs,
        )

        secrets_store_config = store.config.secrets_store

        # Initialize the secrets store
        if (
            secrets_store_config
            and secrets_store_config.type != SecretsStoreType.NONE
        ):
            secrets_store_class = BaseSecretsStore.get_store_class(
                secrets_store_config
            )
            store._secrets_store = secrets_store_class(
                zen_store=store,
                config=secrets_store_config,
            )
            # Update the config with the actual secrets store config
            # to reflect the default values in the saved configuration
            store.config.secrets_store = store._secrets_store.config
        return store

    @staticmethod
    def get_default_store_config(path: str) -> StoreConfiguration:
        """Get the default store configuration.

        The default store is a SQLite store that saves the DB contents on the
        local filesystem.

        Args:
            path: The local path where the store DB will be stored.

        Returns:
            The default store configuration.
        """
        from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

        config = SqlZenStoreConfiguration(
            type=StoreType.SQL,
            url=SqlZenStoreConfiguration.get_local_url(path),
            secrets_store=SqlSecretsStoreConfiguration(
                type=SecretsStoreType.SQL,
            ),
        )
        return config

    def _initialize_database(self) -> None:
        """Initialize the database on first use.

        Idempotently ensures that the default workspace, the admin and guest
        roles, the default user and the default stack all exist, creating
        any of them that are missing.
        """
        try:
            default_workspace = self._default_workspace
        except KeyError:
            default_workspace = self._create_default_workspace()
        # NOTE: do not probe for the roles with `assert`: asserts are
        # stripped when Python runs with the -O flag, which would silently
        # skip both the existence check and the role creation below. Simply
        # evaluating the property raises KeyError when the role is missing.
        try:
            _ = self._admin_role
        except KeyError:
            self._create_admin_role()
        try:
            _ = self._guest_role
        except KeyError:
            self._create_guest_role()
        try:
            default_user = self._default_user
        except KeyError:
            default_user = self._create_default_user()
        try:
            self._get_default_stack(
                workspace_name_or_id=default_workspace.id,
                user_name_or_id=default_user.id,
            )
        except KeyError:
            self._create_default_stack(
                workspace_name_or_id=default_workspace.id,
                user_name_or_id=default_user.id,
            )

    @property
    def url(self) -> str:
        """The URL of the store.

        Returns:
            The URL of the store.
        """
        return self.config.url

    @property
    def type(self) -> StoreType:
        """The type of the store.

        Returns:
            The type of the store.
        """
        return self.TYPE

    @property
    def secrets_store(self) -> Optional["BaseSecretsStore"]:
        """The secrets store associated with this store.

        Returns:
            The secrets store associated with this store.
        """
        return self._secrets_store

    def validate_active_config(
        self,
        active_workspace_name_or_id: Optional[Union[str, UUID]] = None,
        active_stack_id: Optional[UUID] = None,
        config_name: str = "",
    ) -> Tuple[WorkspaceResponseModel, StackResponseModel]:
        """Validate the active configuration.

        Call this method to validate the supplied active workspace and active
        stack values.

        This method is guaranteed to return valid workspace ID and stack ID
        values. If the supplied workspace and stack are not set or are not valid
        (e.g. they do not exist or are not accessible), the default workspace and
        default workspace stack will be returned in their stead.

        Args:
            active_workspace_name_or_id: The name or ID of the active workspace.
            active_stack_id: The ID of the active stack.
            config_name: The name of the configuration to validate (used in the
                displayed logs/messages).

        Returns:
            A tuple containing the active workspace and active stack.
        """
        active_workspace: WorkspaceResponseModel

        if active_workspace_name_or_id:
            try:
                active_workspace = self.get_workspace(
                    active_workspace_name_or_id
                )
            except KeyError:
                # The configured workspace no longer exists or is not
                # accessible: fall back to the default workspace.
                active_workspace = self._get_or_create_default_workspace()

                logger.warning(
                    f"The current {config_name} active workspace is no longer "
                    f"available. Resetting the active workspace to "
                    f"'{active_workspace.name}'."
                )
        else:
            active_workspace = self._get_or_create_default_workspace()

            logger.info(
                f"Setting the {config_name} active workspace "
                f"to '{active_workspace.name}'."
            )

        active_stack: StackResponseModel

        # Sanitize the active stack
        if active_stack_id:
            # Ensure that the active stack is still valid
            try:
                active_stack = self.get_stack(stack_id=active_stack_id)
            except KeyError:
                logger.warning(
                    "The current %s active stack is no longer available. "
                    "Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(
                    active_workspace
                )
            else:
                # The stack exists, but reject it if it belongs to a
                # different workspace or is a private stack of another user.
                if active_stack.workspace.id != active_workspace.id:
                    logger.warning(
                        "The current %s active stack is not part of the active "
                        "workspace. Resetting the active stack to default.",
                        config_name,
                    )
                    active_stack = self._get_or_create_default_stack(
                        active_workspace
                    )
                elif not active_stack.is_shared and (
                    not active_stack.user
                    or (active_stack.user.id != self.get_user().id)
                ):
                    logger.warning(
                        "The current %s active stack is not shared and not "
                        "owned by the active user. "
                        "Resetting the active stack to default.",
                        config_name,
                    )
                    active_stack = self._get_or_create_default_stack(
                        active_workspace
                    )
        else:
            logger.warning(
                "Setting the %s active stack to default.",
                config_name,
            )
            active_stack = self._get_or_create_default_stack(active_workspace)

        return active_workspace, active_stack

    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.
        """
        return ServerModel(
            id=GlobalConfiguration().user_id,
            version=zenml.__version__,
            deployment_type=os.environ.get(
                ENV_ZENML_SERVER_DEPLOYMENT_TYPE, ServerDeploymentType.OTHER
            ),
            database_type=ServerDatabaseType.OTHER,
            debug=IS_DEBUG_ENV,
            secrets_store_type=self.secrets_store.type
            if self.secrets_store
            else SecretsStoreType.NONE,
        )

    def is_local_store(self) -> bool:
        """Check if the store is local or connected to a local ZenML server.

        Returns:
            True if the store is local, False otherwise.
        """
        return self.get_store_info().is_local()

    def _get_or_create_default_stack(
        self, workspace: "WorkspaceResponseModel"
    ) -> "StackResponseModel":
        """Get the default stack in a workspace, creating it if missing.

        Args:
            workspace: The workspace in which to look up the default stack.

        Returns:
            The default stack owned by the active user in the workspace.
        """
        try:
            return self._get_default_stack(
                workspace_name_or_id=workspace.id,
                user_name_or_id=self.get_user().id,
            )
        except KeyError:
            return self._create_default_stack(  # type: ignore[no-any-return]
                workspace_name_or_id=workspace.id,
                user_name_or_id=self.get_user().id,
            )

    def _get_or_create_default_workspace(self) -> "WorkspaceResponseModel":
        """Get the default workspace, creating it if missing.

        Returns:
            The default workspace.
        """
        try:
            return self._default_workspace
        except KeyError:
            return self._create_default_workspace()  # type: ignore[no-any-return]

    # --------------
    # Event Handlers
    # --------------

    def register_event_handler(
        self,
        event: StoreEvent,
        handler: Callable[..., Any],
    ) -> None:
        """Register an external event handler.

        The handler will be called when the store event is triggered.

        Args:
            event: The event to register the handler for.
            handler: The handler function to register.
        """
        self._event_handlers.setdefault(event, []).append(handler)

    def _trigger_event(self, event: StoreEvent, **kwargs: Any) -> None:
        """Trigger an event and call all registered handlers.

        Handler exceptions are logged and suppressed so that one failing
        handler cannot break the store operation that triggered the event.

        Args:
            event: The event to trigger.
            **kwargs: The event arguments.
        """
        for handler in self._event_handlers.get(event, []):
            try:
                handler(event, **kwargs)
            except Exception as e:
                logger.error(
                    f"Silently ignoring error caught while triggering event "
                    f"store handler for event {event.value}: {e}",
                    exc_info=True,
                )

    # ------
    # Stacks
    # ------

    @track(AnalyticsEvent.REGISTERED_DEFAULT_STACK)
    def _create_default_stack(
        self,
        workspace_name_or_id: Union[str, UUID],
        user_name_or_id: Union[str, UUID],
    ) -> StackResponseModel:
        """Create the default stack components and stack.

        The default stack contains a local orchestrator and a local artifact
        store.

        Args:
            workspace_name_or_id: Name or ID of the workspace to which the stack
                belongs.
            user_name_or_id: The name or ID of the user that owns the stack.

        Returns:
            The model of the created default stack.
        """
        workspace = self.get_workspace(
            workspace_name_or_id=workspace_name_or_id
        )
        user = self.get_user(user_name_or_id=user_name_or_id)

        logger.info(
            f"Creating default stack for user '{user.name}' in workspace "
            f"{workspace.name}..."
        )

        # Register the default orchestrator
        orchestrator = self.create_stack_component(
            component=ComponentRequestModel(
                user=user.id,
                workspace=workspace.id,
                name=DEFAULT_STACK_COMPONENT_NAME,
                type=StackComponentType.ORCHESTRATOR,
                flavor="local",
                configuration={},
            ),
        )

        # Register the default artifact store
        artifact_store = self.create_stack_component(
            component=ComponentRequestModel(
                user=user.id,
                workspace=workspace.id,
                name=DEFAULT_STACK_COMPONENT_NAME,
                type=StackComponentType.ARTIFACT_STORE,
                flavor="local",
                configuration={},
            ),
        )

        components = {c.type: [c.id] for c in [orchestrator, artifact_store]}
        # Register the default stack
        stack = StackRequestModel(
            name=DEFAULT_STACK_NAME,
            components=components,
            is_shared=False,
            workspace=workspace.id,
            user=user.id,
        )
        return self.create_stack(stack=stack)

    def _get_default_stack(
        self,
        workspace_name_or_id: Union[str, UUID],
        user_name_or_id: Union[str, UUID],
    ) -> StackResponseModel:
        """Get the default stack for a user in a workspace.

        Args:
            workspace_name_or_id: Name or ID of the workspace.
            user_name_or_id: Name or ID of the user.

        Returns:
            The default stack in the workspace owned by the supplied user.

        Raises:
            KeyError: if the workspace or default stack doesn't exist.
        """
        default_stacks = self.list_stacks(
            StackFilterModel(
                workspace_id=workspace_name_or_id,
                user_id=user_name_or_id,
                name=DEFAULT_STACK_NAME,
            )
        )
        if default_stacks.total == 0:
            raise KeyError(
                f"No default stack found for user {str(user_name_or_id)} in "
                f"workspace {str(workspace_name_or_id)}"
            )
        return default_stacks.items[0]

    # -----
    # Roles
    # -----
    @property
    def _admin_role(self) -> RoleResponseModel:
        """Get the admin role.

        Returns:
            The default admin role.
        """
        return self.get_role(DEFAULT_ADMIN_ROLE)

    @track(AnalyticsEvent.CREATED_DEFAULT_ROLES)
    def _create_admin_role(self) -> RoleResponseModel:
        """Creates the admin role.

        Returns:
            The admin role
        """
        logger.info(f"Creating '{DEFAULT_ADMIN_ROLE}' role ...")
        return self.create_role(
            RoleRequestModel(
                name=DEFAULT_ADMIN_ROLE,
                permissions={
                    PermissionType.READ,
                    PermissionType.WRITE,
                    PermissionType.ME,
                },
            )
        )

    @property
    def _guest_role(self) -> RoleResponseModel:
        """Get the guest role.

        Returns:
            The guest role.
        """
        return self.get_role(DEFAULT_GUEST_ROLE)

    @track(AnalyticsEvent.CREATED_DEFAULT_ROLES)
    def _create_guest_role(self) -> RoleResponseModel:
        """Creates the guest role.

        Returns:
            The guest role
        """
        logger.info(f"Creating '{DEFAULT_GUEST_ROLE}' role ...")
        return self.create_role(
            RoleRequestModel(
                name=DEFAULT_GUEST_ROLE,
                permissions={
                    PermissionType.READ,
                    PermissionType.ME,
                },
            )
        )

    # -----
    # Users
    # -----

    @property
    def _default_user_name(self) -> str:
        """Get the default user name.

        Returns:
            The default user name.
        """
        return os.getenv(ENV_ZENML_DEFAULT_USER_NAME, DEFAULT_USERNAME)

    @property
    def _default_user(self) -> UserResponseModel:
        """Get the default user.

        Returns:
            The default user.

        Raises:
            KeyError: If the default user doesn't exist.
        """
        user_name = self._default_user_name
        try:
            return self.get_user(user_name)
        except KeyError:
            raise KeyError(f"The default user '{user_name}' is not configured")

    @track(AnalyticsEvent.CREATED_DEFAULT_USER)
    def _create_default_user(self) -> UserResponseModel:
        """Creates a default user with the admin role.

        Returns:
            The default user.
        """
        # Reuse the `_default_user_name` property so that name resolution
        # stays consistent with `_default_user`.
        user_name = self._default_user_name
        user_password = os.getenv(
            ENV_ZENML_DEFAULT_USER_PASSWORD, DEFAULT_PASSWORD
        )

        logger.info(f"Creating default user '{user_name}' ...")
        new_user = self.create_user(
            UserRequestModel(
                name=user_name,
                active=True,
                password=user_password,
            )
        )
        # A workspace of None assigns the admin role globally rather than
        # scoped to a single workspace.
        self.create_user_role_assignment(
            UserRoleAssignmentRequestModel(
                role=self._admin_role.id,
                user=new_user.id,
                workspace=None,
            )
        )
        return new_user

    # -----
    # Roles
    # -----

    @property
    def roles(self) -> Page[RoleResponseModel]:
        """All existing roles.

        Returns:
            A list of all existing roles.
        """
        return self.list_roles(RoleFilterModel())

    # --------
    # Workspaces
    # --------

    @property
    def _default_workspace_name(self) -> str:
        """Get the default workspace name.

        Returns:
            The default workspace name.
        """
        return os.getenv(
            ENV_ZENML_DEFAULT_WORKSPACE_NAME, DEFAULT_WORKSPACE_NAME
        )

    @property
    def _default_workspace(self) -> WorkspaceResponseModel:
        """Get the default workspace.

        Returns:
            The default workspace.

        Raises:
            KeyError: if the default workspace doesn't exist.
        """
        workspace_name = self._default_workspace_name
        try:
            return self.get_workspace(workspace_name)
        except KeyError:
            raise KeyError(
                f"The default workspace '{workspace_name}' is not configured"
            )

    @track(AnalyticsEvent.CREATED_DEFAULT_WORKSPACE)
    def _create_default_workspace(self) -> WorkspaceResponseModel:
        """Creates a default workspace.

        Returns:
            The default workspace.
        """
        workspace_name = self._default_workspace_name
        logger.info(f"Creating default workspace '{workspace_name}' ...")
        return self.create_workspace(
            WorkspaceRequestModel(name=workspace_name)
        )

    # ---------
    # Analytics
    # ---------

    def track_event(
        self,
        event: AnalyticsEvent,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Track an analytics event.

        Args:
            event: The event to track.
            metadata: Additional metadata to track with the event.
        """
        if self.track_analytics:
            # Server information is always tracked, if available.
            track_event(event, metadata)

    class Config:
        """Pydantic configuration class."""

        # Validate attributes when assigning them. We need to set this in order
        # to have a mix of mutable and immutable attributes
        validate_assignment = True
        # Ignore extra attributes from configs of previous ZenML versions
        extra = "ignore"
        # all attributes with leading underscore are private and therefore
        # are mutable and not included in serialization
        underscore_attrs_are_private = True
roles: Page[RoleResponseModel] property readonly

All existing roles.

Returns:

Type Description
Page[RoleResponseModel]

A list of all existing roles.

secrets_store: Optional[BaseSecretsStore] property readonly

The secrets store associated with this store.

Returns:

Type Description
Optional[BaseSecretsStore]

The secrets store associated with this store.

type: StoreType property readonly

The type of the store.

Returns:

Type Description
StoreType

The type of the store.

url: str property readonly

The URL of the store.

Returns:

Type Description
str

The URL of the store.

Config

Pydantic configuration class.

Source code in zenml/zen_stores/base_zen_store.py
class Config:
    """Pydantic configuration class.

    Controls validation and serialization behavior of the store model.
    """

    # Validate attributes when assigning them. We need to set this in order
    # to have a mix of mutable and immutable attributes
    validate_assignment = True
    # Ignore extra attributes from configs of previous ZenML versions
    extra = "ignore"
    # all attributes with leading underscore are private and therefore
    # are mutable and not included in serialization
    underscore_attrs_are_private = True
__init__(self, skip_default_registrations=False, **kwargs) special

Create and initialize a store.

Parameters:

Name Type Description Default
skip_default_registrations bool

If True, the creation of the default stack and user in the store will be skipped.

False
**kwargs Any

Additional keyword arguments to pass to the Pydantic constructor.

{}

Exceptions:

Type Description
RuntimeError

If the store cannot be initialized.

Source code in zenml/zen_stores/base_zen_store.py
def __init__(
    self,
    skip_default_registrations: bool = False,
    **kwargs: Any,
) -> None:
    """Create and initialize a store.

    Args:
        skip_default_registrations: If `True`, the creation of the default
            stack and user in the store will be skipped.
        **kwargs: Additional keyword arguments to pass to the Pydantic
            constructor.

    Raises:
        RuntimeError: If the store cannot be initialized.
    """
    # Let Pydantic validate and assign the configured fields first.
    super().__init__(**kwargs)

    try:
        self._initialize()
    except Exception as e:
        # Wrap any backend-specific failure in a single RuntimeError that
        # identifies the store type and URL, keeping the original cause.
        raise RuntimeError(
            f"Error initializing {self.type.value} store with URL "
            f"'{self.url}': {str(e)}"
        ) from e

    if not skip_default_registrations:
        logger.debug("Initializing database")
        self._initialize_database()
    else:
        logger.debug("Skipping database initialization")
create_secret(self, secret)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret SecretRequestModel

The secret to create.

required

Returns:

Type Description
SecretResponseModel

The newly created secret.

Exceptions:

Type Description
KeyError

if the user or workspace does not exist.

EntityExistsError

If a secret with the same name already exists in the same scope.

ValueError

if the secret is invalid.

Source code in zenml/zen_stores/base_zen_store.py
@abstractmethod
def create_secret(
    self,
    secret: SecretRequestModel,
) -> SecretResponseModel:
    """Creates a new secret.

    The new secret is also validated against the scoping rules enforced in
    the secrets store:

      - only one workspace-scoped secret with the given name can exist
        in the target workspace.
      - only one user-scoped secret with the given name can exist in the
        target workspace for the target user.

    Args:
        secret: The secret to create.

    Returns:
        The newly created secret.

    Raises:
        KeyError: if the user or workspace does not exist.
        EntityExistsError: If a secret with the same name already exists in
            the same scope.
        ValueError: if the secret is invalid.
    """
    # Abstract: concrete secrets stores implement the persistence and must
    # enforce the scoping rules described above.
create_store(config, skip_default_registrations=False, **kwargs) staticmethod

Create and initialize a store from a store configuration.

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to use.

required
skip_default_registrations bool

If True, the creation of the default stack and user in the store will be skipped.

False
**kwargs Any

Additional keyword arguments to pass to the store class

{}

Returns:

Type Description
BaseZenStore

The initialized store.

Source code in zenml/zen_stores/base_zen_store.py
@staticmethod
def create_store(
    config: StoreConfiguration,
    skip_default_registrations: bool = False,
    **kwargs: Any,
) -> "BaseZenStore":
    """Create and initialize a store from a store configuration.

    Args:
        config: The store configuration to use.
        skip_default_registrations: If `True`, the creation of the default
            stack and user in the store will be skipped.
        **kwargs: Additional keyword arguments to pass to the store class

    Returns:
        The initialized store.
    """
    logger.debug(f"Creating store with config '{config}'...")
    # Resolve the concrete store class from the configured store type.
    store_class = BaseZenStore.get_store_class(config.type)
    store = store_class(
        config=config,
        skip_default_registrations=skip_default_registrations,
        **kwargs,
    )

    secrets_store_config = store.config.secrets_store

    # Initialize the secrets store
    if (
        secrets_store_config
        and secrets_store_config.type != SecretsStoreType.NONE
    ):
        secrets_store_class = BaseSecretsStore.get_store_class(
            secrets_store_config
        )
        store._secrets_store = secrets_store_class(
            zen_store=store,
            config=secrets_store_config,
        )
        # Update the config with the actual secrets store config
        # to reflect the default values in the saved configuration
        store.config.secrets_store = store._secrets_store.config
    return store
delete_secret(self, secret_id)

Deletes a secret.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to delete.

required

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

Source code in zenml/zen_stores/base_zen_store.py
@abstractmethod
def delete_secret(self, secret_id: UUID) -> None:
    """Deletes a secret.

    Args:
        secret_id: The ID of the secret to delete.

    Raises:
        KeyError: if the secret doesn't exist.
    """
    # Abstract: concrete secrets stores implement the deletion.
get_default_store_config(path) staticmethod

Get the default store configuration.

The default store is a SQLite store that saves the DB contents on the local filesystem.

Parameters:

Name Type Description Default
path str

The local path where the store DB will be stored.

required

Returns:

Type Description
StoreConfiguration

The default store configuration.

Source code in zenml/zen_stores/base_zen_store.py
@staticmethod
def get_default_store_config(path: str) -> StoreConfiguration:
    """Get the default store configuration.

    The default store is a SQLite store that saves the DB contents on the
    local filesystem.

    Args:
        path: The local path where the store DB will be stored.

    Returns:
        The default store configuration.
    """
    # Local import (presumably to avoid a circular import at module load
    # time - TODO confirm).
    from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

    config = SqlZenStoreConfiguration(
        type=StoreType.SQL,
        url=SqlZenStoreConfiguration.get_local_url(path),
        # Secrets are stored in the same SQL database by default.
        secrets_store=SqlSecretsStoreConfiguration(
            type=SecretsStoreType.SQL,
        ),
    )
    return config
get_secret(self, secret_id)

Get a secret with a given ID.

Parameters:

Name Type Description Default
secret_id UUID

ID of the secret.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

if the secret does not exist.

Source code in zenml/zen_stores/base_zen_store.py
@abstractmethod
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret with a given ID.

    Args:
        secret_id: ID of the secret.

    Returns:
        The secret.

    Raises:
        KeyError: If the secret does not exist.
    """
get_store_class(store_type) staticmethod

Returns the class of the given store type.

Parameters:

Name Type Description Default
store_type StoreType

The type of the store to get the class for.

required

Returns:

Type Description
Type[BaseZenStore]

The class of the given store type. A TypeError is raised (rather than None returned) if the type is unknown.

Exceptions:

Type Description
TypeError

If the store type is unsupported.

Source code in zenml/zen_stores/base_zen_store.py
@staticmethod
def get_store_class(store_type: StoreType) -> Type["BaseZenStore"]:
    """Returns the class of the given store type.

    Args:
        store_type: The type of the store to get the class for.

    Returns:
        The class of the given store type.

    Raises:
        TypeError: If the store type is unsupported.
    """
    if store_type == StoreType.SQL:
        from zenml.zen_stores.sql_zen_store import SqlZenStore

        return SqlZenStore
    elif store_type == StoreType.REST:
        from zenml.zen_stores.rest_zen_store import RestZenStore

        return RestZenStore
    else:
        raise TypeError(
            f"No store implementation found for store type "
            f"`{store_type.value}`."
        )
get_store_config_class(store_type) staticmethod

Returns the store config class of the given store type.

Parameters:

Name Type Description Default
store_type StoreType

The type of the store to get the class for.

required

Returns:

Type Description
Type[StoreConfiguration]

The config class of the given store type.

Source code in zenml/zen_stores/base_zen_store.py
@staticmethod
def get_store_config_class(
    store_type: StoreType,
) -> Type["StoreConfiguration"]:
    """Return the configuration class matching a store type.

    Args:
        store_type: The store type to look up.

    Returns:
        The config class of the given store type.
    """
    # Delegate the type -> implementation lookup, then read its config type.
    return BaseZenStore.get_store_class(store_type).CONFIG_TYPE
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Source code in zenml/zen_stores/base_zen_store.py
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.
    """
    # Deployment type can be overridden through the environment.
    deployment = os.environ.get(
        ENV_ZENML_SERVER_DEPLOYMENT_TYPE, ServerDeploymentType.OTHER
    )
    if self.secrets_store:
        secrets_store_type = self.secrets_store.type
    else:
        secrets_store_type = SecretsStoreType.NONE
    return ServerModel(
        id=GlobalConfiguration().user_id,
        version=zenml.__version__,
        deployment_type=deployment,
        database_type=ServerDatabaseType.OTHER,
        debug=IS_DEBUG_ENV,
        secrets_store_type=secrets_store_type,
    )
get_store_type(url) staticmethod

Returns the store type associated with a URL schema.

Parameters:

Name Type Description Default
url str

The store URL.

required

Returns:

Type Description
StoreType

The store type associated with the supplied URL schema.

Exceptions:

Type Description
TypeError

If no store type was found to support the supplied URL.

Source code in zenml/zen_stores/base_zen_store.py
@staticmethod
def get_store_type(url: str) -> StoreType:
    """Determine the store type from a URL schema.

    Args:
        url: The store URL.

    Returns:
        The store type associated with the supplied URL schema.

    Raises:
        TypeError: If no store type was found to support the supplied URL.
    """
    from zenml.zen_stores.rest_zen_store import RestZenStoreConfiguration
    from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

    # Probe each known configuration class with guard clauses.
    if SqlZenStoreConfiguration.supports_url_scheme(url):
        return StoreType.SQL
    if RestZenStoreConfiguration.supports_url_scheme(url):
        return StoreType.REST
    raise TypeError(f"No store implementation found for URL: {url}.")
is_local_store(self)

Check if the store is local or connected to a local ZenML server.

Returns:

Type Description
bool

True if the store is local, False otherwise.

Source code in zenml/zen_stores/base_zen_store.py
def is_local_store(self) -> bool:
    """Check whether the store is local or connected to a local ZenML server.

    Returns:
        True if the store is local, False otherwise.
    """
    # Locality is a property of the server info, not of the store itself.
    info = self.get_store_info()
    return info.is_local()
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Source code in zenml/zen_stores/base_zen_store.py
@abstractmethod
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Only secret metadata is returned; no secret values are included. Use
    `get_secret` on each individual secret to fetch its values.

    Args:
        secret_filter_model: All filter parameters, including pagination
            params.

    Returns:
        A page of secrets matching the filter criteria, sorted according
        to the filter. The returned secrets contain only metadata, not
        values; retrieve values via `get_secret` per secret.
    """
register_event_handler(self, event, handler)

Register an external event handler.

The handler will be called when the store event is triggered.

Parameters:

Name Type Description Default
event StoreEvent

The event to register the handler for.

required
handler Callable[..., Any]

The handler function to register.

required
Source code in zenml/zen_stores/base_zen_store.py
def register_event_handler(
    self,
    event: StoreEvent,
    handler: Callable[..., Any],
) -> None:
    """Register an external event handler.

    The handler is invoked whenever the given store event is triggered.

    Args:
        event: The event to register the handler for.
        handler: The handler function to register.
    """
    # Create the handler list for this event on first registration.
    handlers = self._event_handlers.setdefault(event, [])
    handlers.append(handler)
track_event(self, event, metadata=None)

Track an analytics event.

Parameters:

Name Type Description Default
event AnalyticsEvent

The event to track.

required
metadata Optional[Dict[str, Any]]

Additional metadata to track with the event.

None
Source code in zenml/zen_stores/base_zen_store.py
def track_event(
    self,
    event: AnalyticsEvent,
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Track an analytics event.

    The event is only forwarded when analytics tracking is enabled on this
    store via the `track_analytics` attribute.

    Args:
        event: The event to track.
        metadata: Additional metadata to track with the event.
    """
    if self.track_analytics:
        # Server information is always tracked, if available.
        # Delegates to the module-level `track_event` utility function.
        track_event(event, metadata)
update_secret(self, secret_id, secret_update)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to be updated.

required
secret_update SecretUpdateModel

The update to be applied.

required

Returns:

Type Description
SecretResponseModel

The updated secret.

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

EntityExistsError

If a secret with the same name already exists in the same scope.

ValueError

if the secret is invalid.

Source code in zenml/zen_stores/base_zen_store.py
@abstractmethod
def update_secret(
    self,
    secret_id: UUID,
    secret_update: SecretUpdateModel,
) -> SecretResponseModel:
    """Updates a secret.

    Secret values that are specified as `None` in the update that are
    present in the existing secret are removed from the existing secret.
    Values that are present in both secrets are overwritten. All other
    values in both the existing secret and the update are kept (merged).

    If the update includes a change of name or scope, the scoping rules
    enforced in the secrets store are used to validate the update:

      - only one workspace-scoped secret with the given name can exist
        in the target workspace.
      - only one user-scoped secret with the given name can exist in the
        target workspace for the target user.

    Args:
        secret_id: The ID of the secret to be updated.
        secret_update: The update to be applied.

    Returns:
        The updated secret.

    Raises:
        KeyError: If the secret doesn't exist.
        EntityExistsError: If a secret with the same name already exists in
            the same scope.
        ValueError: If the secret is invalid.
    """
validate_active_config(self, active_workspace_name_or_id=None, active_stack_id=None, config_name='')

Validate the active configuration.

Call this method to validate the supplied active workspace and active stack values.

This method is guaranteed to return valid workspace ID and stack ID values. If the supplied workspace and stack are not set or are not valid (e.g. they do not exist or are not accessible), the default workspace and default workspace stack will be returned in their stead.

Parameters:

Name Type Description Default
active_workspace_name_or_id Union[str, uuid.UUID]

The name or ID of the active workspace.

None
active_stack_id Optional[uuid.UUID]

The ID of the active stack.

None
config_name str

The name of the configuration to validate (used in the displayed logs/messages).

''

Returns:

Type Description
Tuple[zenml.models.workspace_models.WorkspaceResponseModel, zenml.models.stack_models.StackResponseModel]

A tuple containing the active workspace and active stack.

Source code in zenml/zen_stores/base_zen_store.py
def validate_active_config(
    self,
    active_workspace_name_or_id: Optional[Union[str, UUID]] = None,
    active_stack_id: Optional[UUID] = None,
    config_name: str = "",
) -> Tuple[WorkspaceResponseModel, StackResponseModel]:
    """Validate the active configuration.

    Call this method to validate the supplied active workspace and active
    stack values.

    This method is guaranteed to return valid workspace ID and stack ID
    values. If the supplied workspace and stack are not set or are not valid
    (e.g. they do not exist or are not accessible), the default workspace and
    default workspace stack will be returned in their stead.

    Args:
        active_workspace_name_or_id: The name or ID of the active workspace.
        active_stack_id: The ID of the active stack.
        config_name: The name of the configuration to validate (used in the
            displayed logs/messages).

    Returns:
        A tuple containing the active workspace and active stack.
    """
    active_workspace: WorkspaceResponseModel

    # Sanitize the active workspace (mirrors the stack sanitization below)
    if active_workspace_name_or_id:
        try:
            active_workspace = self.get_workspace(
                active_workspace_name_or_id
            )
        except KeyError:
            # The configured workspace no longer exists or is not
            # accessible: fall back to the default workspace.
            active_workspace = self._get_or_create_default_workspace()

            logger.warning(
                f"The current {config_name} active workspace is no longer "
                f"available. Resetting the active workspace to "
                f"'{active_workspace.name}'."
            )
    else:
        active_workspace = self._get_or_create_default_workspace()

        logger.info(
            f"Setting the {config_name} active workspace "
            f"to '{active_workspace.name}'."
        )

    active_stack: StackResponseModel

    # Sanitize the active stack
    if active_stack_id:
        # Ensure that the active stack is still valid
        try:
            active_stack = self.get_stack(stack_id=active_stack_id)
        except KeyError:
            logger.warning(
                "The current %s active stack is no longer available. "
                "Resetting the active stack to default.",
                config_name,
            )
            active_stack = self._get_or_create_default_stack(
                active_workspace
            )
        else:
            # The stack exists, but it must also belong to the active
            # workspace and be visible to the active user.
            if active_stack.workspace.id != active_workspace.id:
                logger.warning(
                    "The current %s active stack is not part of the active "
                    "workspace. Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(
                    active_workspace
                )
            elif not active_stack.is_shared and (
                not active_stack.user
                or (active_stack.user.id != self.get_user().id)
            ):
                logger.warning(
                    "The current %s active stack is not shared and not "
                    "owned by the active user. "
                    "Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(
                    active_workspace
                )
    else:
        logger.warning(
            "Setting the %s active stack to default.",
            config_name,
        )
        active_stack = self._get_or_create_default_stack(active_workspace)

    return active_workspace, active_stack

enums

Zen Store enums.

StoreEvent (StrEnum)

Events that can be triggered by the store.

Source code in zenml/zen_stores/enums.py
class StoreEvent(StrEnum):
    """Events that can be triggered by the store.

    Handlers for these events can be registered via
    `BaseZenStore.register_event_handler` and are called when the event
    fires, with event-specific keyword arguments (see member comments).
    """

    # Triggered just before deleting a workspace. The workspace ID is passed as
    # a `workspace_id` UUID argument.
    WORKSPACE_DELETED = "workspace_deleted"
    # Triggered just before deleting a user. The user ID is passed as
    # a `user_id` UUID argument.
    USER_DELETED = "user_deleted"

migrations special

Alembic database migration utilities.

alembic

Alembic utilities wrapper.

The Alembic class defined here acts as a wrapper around the Alembic library that automatically configures Alembic to use the ZenML SQL store database connection.

Alembic

Alembic environment and migration API.

This class provides a wrapper around the Alembic library that automatically configures Alembic to use the ZenML SQL store database connection.

Source code in zenml/zen_stores/migrations/alembic.py
class Alembic:
    """Alembic environment and migration API.

    This class provides a wrapper around the Alembic library that automatically
    configures Alembic to use the ZenML SQL store database connection.
    """

    def __init__(
        self,
        engine: Engine,
        metadata: MetaData = SQLModel.metadata,
        context: Optional[EnvironmentContext] = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the Alembic wrapper.

        Args:
            engine: The SQLAlchemy engine to use.
            metadata: The SQLAlchemy metadata to use.
            context: The Alembic environment context to use. If not set, a new
                context is created pointing to the ZenML migrations directory.
            **kwargs: Additional keyword arguments to pass to the Alembic
                environment context.
        """
        self.engine = engine
        self.metadata = metadata
        self.context_kwargs = kwargs

        self.config = Config()
        # Point Alembic at the migration scripts bundled with ZenML.
        self.config.set_main_option(
            "script_location", str(Path(__file__).parent)
        )
        self.config.set_main_option(
            "version_locations", str(Path(__file__).parent / "versions")
        )

        self.script_directory = ScriptDirectory.from_config(self.config)
        if context is None:
            self.environment_context = EnvironmentContext(
                self.config, self.script_directory
            )
        else:
            self.environment_context = context

    def db_is_empty(self) -> bool:
        """Check if the database is empty.

        Returns:
            True if the database is empty, False otherwise.
        """
        # Check the existence of any of the SQLModel tables. The connection
        # is opened via a context manager so it is released back to the pool
        # instead of being leaked (consistent with `run_migrations` below).
        with self.engine.connect() as connection:
            return not self.engine.dialect.has_table(
                connection, schemas.StackSchema.__tablename__
            )

    def run_migrations(
        self,
        fn: Optional[Callable[[_RevIdType, MigrationContext], List[Any]]],
    ) -> None:
        """Run an online migration function in the current migration context.

        Args:
            fn: Migration function to run. If not set, the function configured
                externally by the Alembic CLI command is used.
        """
        fn_context_args: Dict[Any, Any] = {}
        if fn is not None:
            fn_context_args["fn"] = fn

        with self.engine.connect() as connection:
            self.environment_context.configure(
                connection=connection,
                target_metadata=self.metadata,
                include_object=include_object,
                compare_type=True,
                render_as_batch=True,
                **fn_context_args,
                **self.context_kwargs,
            )

            with self.environment_context.begin_transaction():
                self.environment_context.run_migrations()

    def current_revisions(self) -> List[str]:
        """Get the current database revisions.

        Returns:
            List of head revisions.
        """
        current_revisions: List[str] = []

        def do_get_current_rev(rev: _RevIdType, context: Any) -> List[Any]:
            nonlocal current_revisions

            for r in self.script_directory.get_all_current(
                rev  # type:ignore [arg-type]
            ):
                if r is None:
                    continue
                current_revisions.append(r.revision)
            return []

        self.run_migrations(do_get_current_rev)

        return current_revisions

    def stamp(self, revision: str) -> None:
        """Stamp the revision table with the given revision without running any migrations.

        Args:
            revision: String revision target.
        """

        def do_stamp(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._stamp_revs(revision, rev)

        self.run_migrations(do_stamp)

    def upgrade(self, revision: str = "heads") -> None:
        """Upgrade the database to a later version.

        Args:
            revision: String revision target.
        """

        def do_upgrade(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._upgrade_revs(
                revision, rev  # type:ignore [arg-type]
            )

        self.run_migrations(do_upgrade)

    def downgrade(self, revision: str) -> None:
        """Revert the database to a previous version.

        Args:
            revision: String revision target.
        """

        def do_downgrade(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._downgrade_revs(
                revision, rev  # type:ignore [arg-type]
            )

        self.run_migrations(do_downgrade)
__init__(self, engine, metadata=MetaData(), context=None, **kwargs) special

Initialize the Alembic wrapper.

Parameters:

Name Type Description Default
engine Engine

The SQLAlchemy engine to use.

required
metadata MetaData

The SQLAlchemy metadata to use.

MetaData()
context Optional[alembic.runtime.environment.EnvironmentContext]

The Alembic environment context to use. If not set, a new context is created pointing to the ZenML migrations directory.

None
**kwargs Any

Additional keyword arguments to pass to the Alembic environment context.

{}
Source code in zenml/zen_stores/migrations/alembic.py
def __init__(
    self,
    engine: Engine,
    metadata: MetaData = SQLModel.metadata,
    context: Optional[EnvironmentContext] = None,
    **kwargs: Any,
) -> None:
    """Initialize the Alembic wrapper.

    Args:
        engine: The SQLAlchemy engine to use.
        metadata: The SQLAlchemy metadata to use.
        context: The Alembic environment context to use. If not set, a new
            context is created pointing to the ZenML migrations directory.
        **kwargs: Additional keyword arguments to pass to the Alembic
            environment context.
    """
    self.engine = engine
    self.metadata = metadata
    self.context_kwargs = kwargs

    self.config = Config()
    # Point Alembic at the migration scripts bundled with this package.
    self.config.set_main_option(
        "script_location", str(Path(__file__).parent)
    )
    self.config.set_main_option(
        "version_locations", str(Path(__file__).parent / "versions")
    )

    self.script_directory = ScriptDirectory.from_config(self.config)
    # Reuse an externally supplied environment context when given;
    # otherwise create one from the local config/script directory.
    if context is None:
        self.environment_context = EnvironmentContext(
            self.config, self.script_directory
        )
    else:
        self.environment_context = context
current_revisions(self)

Get the current database revisions.

Returns:

Type Description
List[str]

List of head revisions.

Source code in zenml/zen_stores/migrations/alembic.py
def current_revisions(self) -> List[str]:
    """Get the current database revisions.

    Returns:
        List of head revisions.
    """
    revisions: List[str] = []

    # Collector callback invoked by Alembic inside the migration context.
    def collect(rev: _RevIdType, context: Any) -> List[Any]:
        for script in self.script_directory.get_all_current(
            rev  # type:ignore [arg-type]
        ):
            if script is not None:
                revisions.append(script.revision)
        return []

    self.run_migrations(collect)
    return revisions
db_is_empty(self)

Check if the database is empty.

Returns:

Type Description
bool

True if the database is empty, False otherwise.

Source code in zenml/zen_stores/migrations/alembic.py
def db_is_empty(self) -> bool:
    """Check if the database is empty.

    Returns:
        True if the database is empty, False otherwise.
    """
    # Check the existence of any of the SQLModel tables. Open the
    # connection via a context manager so it is released back to the
    # pool instead of being leaked (the previous code never closed it).
    with self.engine.connect() as connection:
        return not self.engine.dialect.has_table(
            connection, schemas.StackSchema.__tablename__
        )
downgrade(self, revision)

Revert the database to a previous version.

Parameters:

Name Type Description Default
revision str

String revision target.

required
Source code in zenml/zen_stores/migrations/alembic.py
def downgrade(self, revision: str) -> None:
    """Revert the database to a previous version.

    Args:
        revision: String revision target.
    """
    # Compute the downgrade revisions inside the migration context.
    self.run_migrations(
        lambda rev, context: self.script_directory._downgrade_revs(
            revision, rev  # type:ignore [arg-type]
        )
    )
run_migrations(self, fn)

Run an online migration function in the current migration context.

Parameters:

Name Type Description Default
fn Optional[Callable[[Union[str, Sequence[str]], alembic.runtime.migration.MigrationContext], List[Any]]]

Migration function to run. If not set, the function configured externally by the Alembic CLI command is used.

required
Source code in zenml/zen_stores/migrations/alembic.py
def run_migrations(
    self,
    fn: Optional[Callable[[_RevIdType, MigrationContext], List[Any]]],
) -> None:
    """Run an online migration function in the current migration context.

    Args:
        fn: Migration function to run. If not set, the function configured
            externally by the Alembic CLI command is used.
    """
    # Only forward `fn` when explicitly supplied, so the CLI-configured
    # function is used otherwise.
    extra_args: Dict[Any, Any] = {} if fn is None else {"fn": fn}

    with self.engine.connect() as connection:
        self.environment_context.configure(
            connection=connection,
            target_metadata=self.metadata,
            include_object=include_object,
            compare_type=True,
            render_as_batch=True,
            **extra_args,
            **self.context_kwargs,
        )
        with self.environment_context.begin_transaction():
            self.environment_context.run_migrations()
stamp(self, revision)

Stamp the revision table with the given revision without running any migrations.

Parameters:

Name Type Description Default
revision str

String revision target.

required
Source code in zenml/zen_stores/migrations/alembic.py
def stamp(self, revision: str) -> None:
    """Stamp the revision table with the given revision without running any migrations.

    Args:
        revision: String revision target.
    """
    # Record the target revision inside the migration context.
    self.run_migrations(
        lambda rev, context: self.script_directory._stamp_revs(revision, rev)
    )
upgrade(self, revision='heads')

Upgrade the database to a later version.

Parameters:

Name Type Description Default
revision str

String revision target.

'heads'
Source code in zenml/zen_stores/migrations/alembic.py
def upgrade(self, revision: str = "heads") -> None:
    """Upgrade the database to a later version.

    Args:
        revision: String revision target.
    """
    # Compute the upgrade revisions inside the migration context.
    self.run_migrations(
        lambda rev, context: self.script_directory._upgrade_revs(
            revision, rev  # type:ignore [arg-type]
        )
    )
AlembicVersion (Base)

Alembic version table.

Source code in zenml/zen_stores/migrations/alembic.py
class AlembicVersion(Base):  # type: ignore[valid-type,misc]
    """Alembic version table."""

    __tablename__ = "alembic_version"
    # Single-column primary key holding the current revision identifier.
    version_num = Column(String, nullable=False, primary_key=True)
include_object(object, name, type_, *args, **kwargs)

Function used to exclude tables from the migration scripts.

Parameters:

Name Type Description Default
object Any

The schema item object to check.

required
name str

The name of the object to check.

required
type_ str

The type of the object to check.

required
*args Any

Additional arguments.

()
**kwargs Any

Additional keyword arguments.

{}

Returns:

Type Description
bool

True if the object should be included, False otherwise.

Source code in zenml/zen_stores/migrations/alembic.py
def include_object(
    object: Any, name: str, type_: str, *args: Any, **kwargs: Any
) -> bool:
    """Function used to exclude tables from the migration scripts.

    Args:
        object: The schema item object to check.
        name: The name of the object to check.
        type_: The type of the object to check.
        *args: Additional arguments.
        **kwargs: Additional keyword arguments.

    Returns:
        True if the object should be included, False otherwise.
    """
    # Tables listed in `exclude_tables` are skipped; everything else passes.
    if type_ == "table" and name in exclude_tables:
        return False
    return True

rest_zen_store

REST Zen Store implementation.

RestZenStore (BaseZenStore) pydantic-model

Store implementation for accessing data from a REST API.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStore(BaseZenStore):
    """Store implementation for accessing data from a REST API."""

    config: RestZenStoreConfiguration
    TYPE: ClassVar[StoreType] = StoreType.REST
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]] = RestZenStoreConfiguration
    _api_token: Optional[str] = None
    _session: Optional[requests.Session] = None

    def _initialize_database(self) -> None:
        """Initialize the database.

        This is intentionally a no-op for the REST store: the database is
        owned and managed by the server this client talks to.
        """
        # don't do anything for a REST store

    # ====================================
    # ZenML Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the REST store.

        Queries the server for its version and warns the user when it
        differs from the installed client version, unless the warning has
        been disabled through the corresponding environment variable.
        """
        client_version = zenml.__version__
        server_version = self.get_store_info().version

        versions_match = server_version == client_version
        if DISABLE_CLIENT_SERVER_MISMATCH_WARNING or versions_match:
            return

        logger.warning(
            "Your ZenML client version (%s) does not match the server "
            "version (%s). This version mismatch might lead to errors or "
            "unexpected behavior. \nTo disable this warning message, set "
            "the environment variable `%s=True`",
            client_version,
            server_version,
            ENV_ZENML_DISABLE_CLIENT_SERVER_MISMATCH_WARNING,
        )

    def get_store_info(self) -> ServerModel:
        """Fetch metadata about the connected ZenML server.

        Returns:
            Information about the server.
        """
        response_body = self.get(INFO)
        return ServerModel.parse_obj(response_body)

    # ------
    # Stacks
    # ------

    @track(AnalyticsEvent.REGISTERED_STACK)
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Register a new stack on the server.

        Args:
            stack: The stack to register.

        Returns:
            The registered stack.
        """
        return self._create_workspace_scoped_resource(
            route=STACKS, resource=stack, response_model=StackResponseModel
        )

    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Fetch a single stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.
        """
        return self._get_resource(
            route=STACKS,
            resource_id=stack_id,
            response_model=StackResponseModel,
        )

    def list_stacks(
        self, stack_filter_model: StackFilterModel
    ) -> Page[StackResponseModel]:
        """Retrieve a page of stacks matching the given filter.

        Args:
            stack_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all stacks matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=stack_filter_model,
            route=STACKS,
            response_model=StackResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_STACK)
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Apply an update to an existing stack.

        Args:
            stack_id: The ID of the stack update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.
        """
        return self._update_resource(
            route=STACKS,
            resource_id=stack_id,
            resource_update=stack_update,
            response_model=StackResponseModel,
        )

    @track(AnalyticsEvent.DELETED_STACK)
    def delete_stack(self, stack_id: UUID) -> None:
        """Remove a stack from the server.

        Args:
            stack_id: The ID of the stack to delete.
        """
        self._delete_resource(route=STACKS, resource_id=stack_id)

    # ----------------
    # Stack components
    # ----------------

    @track(AnalyticsEvent.REGISTERED_STACK_COMPONENT)
    def create_stack_component(
        self,
        component: ComponentRequestModel,
    ) -> ComponentResponseModel:
        """Register a new stack component.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.
        """
        return self._create_workspace_scoped_resource(
            route=STACK_COMPONENTS,
            resource=component,
            response_model=ComponentResponseModel,
        )

    def get_stack_component(
        self, component_id: UUID
    ) -> ComponentResponseModel:
        """Fetch a single stack component by ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.
        """
        return self._get_resource(
            route=STACK_COMPONENTS,
            resource_id=component_id,
            response_model=ComponentResponseModel,
        )

    def list_stack_components(
        self, component_filter_model: ComponentFilterModel
    ) -> Page[ComponentResponseModel]:
        """Retrieve a page of stack components matching the given filter.

        Args:
            component_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all stack components matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=component_filter_model,
            route=STACK_COMPONENTS,
            response_model=ComponentResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_STACK_COMPONENT)
    def update_stack_component(
        self,
        component_id: UUID,
        component_update: ComponentUpdateModel,
    ) -> ComponentResponseModel:
        """Apply an update to an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.
        """
        return self._update_resource(
            route=STACK_COMPONENTS,
            resource_id=component_id,
            resource_update=component_update,
            response_model=ComponentResponseModel,
        )

    @track(AnalyticsEvent.DELETED_STACK_COMPONENT)
    def delete_stack_component(self, component_id: UUID) -> None:
        """Remove a stack component from the server.

        Args:
            component_id: The ID of the stack component to delete.
        """
        self._delete_resource(
            route=STACK_COMPONENTS, resource_id=component_id
        )

    # -----------------------
    # Stack component flavors
    # -----------------------

    @track(AnalyticsEvent.CREATED_FLAVOR)
    def create_flavor(self, flavor: FlavorRequestModel) -> FlavorResponseModel:
        """Register a new stack component flavor.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.
        """
        return self._create_resource(
            route=FLAVORS, resource=flavor, response_model=FlavorResponseModel
        )

    def update_flavor(
        self, flavor_id: UUID, flavor_update: FlavorUpdateModel
    ) -> FlavorResponseModel:
        """Updates an existing flavor.

        Args:
            flavor_id: The id of the flavor to update.
            flavor_update: The update to be applied to the flavor.

        Returns:
            The updated flavor.
        """
        return self._update_resource(
            resource_id=flavor_id,
            resource_update=flavor_update,
            route=FLAVORS,
            response_model=FlavorResponseModel,
        )

    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Fetch a single stack component flavor by ID.

        Args:
            flavor_id: The ID of the stack component flavor to get.

        Returns:
            The stack component flavor.
        """
        return self._get_resource(
            route=FLAVORS,
            resource_id=flavor_id,
            response_model=FlavorResponseModel,
        )

    def list_flavors(
        self, flavor_filter_model: FlavorFilterModel
    ) -> Page[FlavorResponseModel]:
        """Retrieve a page of stack component flavors matching the filter.

        Args:
            flavor_filter_model: All filter parameters including pagination
                params

        Returns:
            List of all the stack component flavors matching the given criteria.
        """
        return self._list_paginated_resources(
            filter_model=flavor_filter_model,
            route=FLAVORS,
            response_model=FlavorResponseModel,
        )

    @track(AnalyticsEvent.DELETED_FLAVOR)
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Remove a stack component flavor from the server.

        Args:
            flavor_id: The ID of the stack component flavor to delete.
        """
        self._delete_resource(route=FLAVORS, resource_id=flavor_id)

    # -----
    # Users
    # -----

    @track(AnalyticsEvent.CREATED_USER)
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Creates a new user.

        Args:
            user: User to be created.

        Returns:
            The newly created user.
        """
        # Note the query parameter: default-role assignment is explicitly
        # turned off for users created through this client call.
        return self._create_resource(
            route=USERS + "?assign_default_role=False",
            resource=user,
            response_model=UserResponseModel,
        )

    def get_user(
        self,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        include_private: bool = False,
    ) -> UserResponseModel:
        """Gets a specific user; without an identifier the active user is returned.

        The `include_private` parameter is ignored here as it is handled
        implicitly by the /current-user endpoint that is queried when no
        user_name_or_id is set. Raises a KeyError in case a user with that id
        does not exist.

        Args:
            user_name_or_id: The name or ID of the user to get.
            include_private: Whether to include private user information

        Returns:
            The requested user, if it was found.
        """
        if not user_name_or_id:
            # No identifier supplied: ask the server for the active user.
            return UserResponseModel.parse_obj(self.get(CURRENT_USER))
        return self._get_resource(
            route=USERS,
            resource_id=user_name_or_id,
            response_model=UserResponseModel,
        )

    def get_auth_user(
        self, user_name_or_id: Union[str, UUID]
    ) -> "UserAuthModel":
        """Gets the auth model to a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Raises:
            NotImplementedError: This method is only available for the
                SQLZenStore.
        """
        message = (
            "This method is only designed for use"
            " by the server endpoints. It is not designed"
            " to be called from the client side."
        )
        raise NotImplementedError(message)

    def list_users(
        self, user_filter_model: UserFilterModel
    ) -> Page[UserResponseModel]:
        """Retrieve a page of users matching the given filter.

        Args:
            user_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all users.
        """
        return self._list_paginated_resources(
            filter_model=user_filter_model,
            route=USERS,
            response_model=UserResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_USER)
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Apply an update to an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The updated user.
        """
        return self._update_resource(
            route=USERS,
            resource_id=user_id,
            resource_update=user_update,
            response_model=UserResponseModel,
        )

    @track(AnalyticsEvent.DELETED_USER)
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Remove a user from the server.

        Args:
            user_name_or_id: The name or ID of the user to delete.
        """
        self._delete_resource(route=USERS, resource_id=user_name_or_id)

    # -----
    # Teams
    # -----

    @track(AnalyticsEvent.CREATED_TEAM)
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Register a new team.

        Args:
            team: The team model to create.

        Returns:
            The newly created team.
        """
        return self._create_resource(
            route=TEAMS, resource=team, response_model=TeamResponseModel
        )

    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Fetch a single team by name or ID.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.
        """
        return self._get_resource(
            route=TEAMS,
            resource_id=team_name_or_id,
            response_model=TeamResponseModel,
        )

    def list_teams(
        self, team_filter_model: TeamFilterModel
    ) -> Page[TeamResponseModel]:
        """Retrieve a page of teams matching the given filter.

        Args:
            team_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all teams matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=team_filter_model,
            route=TEAMS,
            response_model=TeamResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_TEAM)
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Apply an update to an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The updated team.
        """
        return self._update_resource(
            route=TEAMS,
            resource_id=team_id,
            resource_update=team_update,
            response_model=TeamResponseModel,
        )

    @track(AnalyticsEvent.DELETED_TEAM)
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Remove a team from the server.

        Args:
            team_name_or_id: Name or ID of the team to delete.
        """
        self._delete_resource(route=TEAMS, resource_id=team_name_or_id)

    # -----
    # Roles
    # -----

    @track(AnalyticsEvent.CREATED_ROLE)
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Register a new role.

        Args:
            role: The role model to create.

        Returns:
            The newly created role.
        """
        return self._create_resource(
            route=ROLES, resource=role, response_model=RoleResponseModel
        )

    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Fetch a single role by name or ID.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.
        """
        return self._get_resource(
            route=ROLES,
            resource_id=role_name_or_id,
            response_model=RoleResponseModel,
        )

    def list_roles(
        self, role_filter_model: RoleFilterModel
    ) -> Page[RoleResponseModel]:
        """Retrieve a page of roles matching the given filter.

        Args:
            role_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all roles matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=role_filter_model,
            route=ROLES,
            response_model=RoleResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_ROLE)
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Apply an update to an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The updated role.
        """
        return self._update_resource(
            route=ROLES,
            resource_id=role_id,
            resource_update=role_update,
            response_model=RoleResponseModel,
        )

    @track(AnalyticsEvent.DELETED_ROLE)
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Remove a role from the server.

        Args:
            role_name_or_id: Name or ID of the role to delete.
        """
        self._delete_resource(route=ROLES, resource_id=role_name_or_id)

    # ----------------
    # Role assignments
    # ----------------

    def list_user_role_assignments(
        self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
    ) -> Page[UserRoleAssignmentResponseModel]:
        """Retrieve a page of user role assignments matching the filter.

        Args:
            user_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=user_role_assignment_filter_model,
            route=USER_ROLE_ASSIGNMENTS,
            response_model=UserRoleAssignmentResponseModel,
        )

    def get_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> UserRoleAssignmentResponseModel:
        """Get an existing role assignment by ID.

        Args:
            user_role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.
        """
        return self._get_resource(
            resource_id=user_role_assignment_id,
            route=USER_ROLE_ASSIGNMENTS,
            response_model=UserRoleAssignmentResponseModel,
        )

    def delete_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> None:
        """Remove a specific user role assignment.

        Args:
            user_role_assignment_id: The ID of the specific role assignment.
        """
        self._delete_resource(
            route=USER_ROLE_ASSIGNMENTS,
            resource_id=user_role_assignment_id,
        )

    def create_user_role_assignment(
        self, user_role_assignment: UserRoleAssignmentRequestModel
    ) -> UserRoleAssignmentResponseModel:
        """Creates a new role assignment.

        Args:
            user_role_assignment: The role assignment to create.

        Returns:
            The newly created role assignment.
        """
        return self._create_resource(
            resource=user_role_assignment,
            route=USER_ROLE_ASSIGNMENTS,
            response_model=UserRoleAssignmentResponseModel,
        )

    # ---------------------
    # Team Role assignments
    # ---------------------

    def create_team_role_assignment(
        self, team_role_assignment: TeamRoleAssignmentRequestModel
    ) -> TeamRoleAssignmentResponseModel:
        """Register a new team role assignment.

        Args:
            team_role_assignment: The role assignment model to create.

        Returns:
            The newly created role assignment.
        """
        return self._create_resource(
            route=TEAM_ROLE_ASSIGNMENTS,
            resource=team_role_assignment,
            response_model=TeamRoleAssignmentResponseModel,
        )

    def get_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> TeamRoleAssignmentResponseModel:
        """Fetch a specific team role assignment by ID.

        Args:
            team_role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.
        """
        return self._get_resource(
            route=TEAM_ROLE_ASSIGNMENTS,
            resource_id=team_role_assignment_id,
            response_model=TeamRoleAssignmentResponseModel,
        )

    def delete_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> None:
        """Remove a specific team role assignment.

        Args:
            team_role_assignment_id: The ID of the specific role assignment.
        """
        self._delete_resource(
            route=TEAM_ROLE_ASSIGNMENTS,
            resource_id=team_role_assignment_id,
        )

    def list_team_role_assignments(
        self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
    ) -> Page[TeamRoleAssignmentResponseModel]:
        """Retrieve a page of team role assignments matching the filter.

        Args:
            team_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=team_role_assignment_filter_model,
            route=TEAM_ROLE_ASSIGNMENTS,
            response_model=TeamRoleAssignmentResponseModel,
        )

    # ----------
    # Workspaces
    # ----------

    @track(AnalyticsEvent.CREATED_WORKSPACE)
    def create_workspace(
        self, workspace: WorkspaceRequestModel
    ) -> WorkspaceResponseModel:
        """Register a new workspace.

        Args:
            workspace: The workspace to create.

        Returns:
            The newly created workspace.
        """
        return self._create_resource(
            route=WORKSPACES,
            resource=workspace,
            response_model=WorkspaceResponseModel,
        )

    def get_workspace(
        self, workspace_name_or_id: Union[UUID, str]
    ) -> WorkspaceResponseModel:
        """Fetch a single workspace by name or ID.

        Args:
            workspace_name_or_id: Name or ID of the workspace to get.

        Returns:
            The requested workspace.
        """
        return self._get_resource(
            route=WORKSPACES,
            resource_id=workspace_name_or_id,
            response_model=WorkspaceResponseModel,
        )

    def list_workspaces(
        self, workspace_filter_model: WorkspaceFilterModel
    ) -> Page[WorkspaceResponseModel]:
        """Retrieve a page of workspaces matching the given filter.

        Args:
            workspace_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all workspaces matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=workspace_filter_model,
            route=WORKSPACES,
            response_model=WorkspaceResponseModel,
        )

    @track(AnalyticsEvent.UPDATED_WORKSPACE)
    def update_workspace(
        self, workspace_id: UUID, workspace_update: WorkspaceUpdateModel
    ) -> WorkspaceResponseModel:
        """Apply an update to an existing workspace.

        Args:
            workspace_id: The ID of the workspace to be updated.
            workspace_update: The update to be applied to the workspace.

        Returns:
            The updated workspace.
        """
        return self._update_resource(
            route=WORKSPACES,
            resource_id=workspace_id,
            resource_update=workspace_update,
            response_model=WorkspaceResponseModel,
        )

    @track(AnalyticsEvent.DELETED_WORKSPACE)
    def delete_workspace(self, workspace_name_or_id: Union[str, UUID]) -> None:
        """Remove a workspace from the server.

        Args:
            workspace_name_or_id: Name or ID of the workspace to delete.
        """
        self._delete_resource(
            route=WORKSPACES, resource_id=workspace_name_or_id
        )

    # ---------
    # Pipelines
    # ---------

    @track(AnalyticsEvent.CREATE_PIPELINE)
    def create_pipeline(
        self, pipeline: PipelineRequestModel
    ) -> PipelineResponseModel:
        """Register a new pipeline in a workspace.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The newly created pipeline.
        """
        return self._create_workspace_scoped_resource(
            route=PIPELINES,
            resource=pipeline,
            response_model=PipelineResponseModel,
        )

    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Fetch a single pipeline by ID.

        Args:
            pipeline_id: ID of the pipeline.

        Returns:
            The pipeline.
        """
        return self._get_resource(
            route=PIPELINES,
            resource_id=pipeline_id,
            response_model=PipelineResponseModel,
        )

    def list_pipelines(
        self, pipeline_filter_model: PipelineFilterModel
    ) -> Page[PipelineResponseModel]:
        """Retrieve a page of pipelines matching the given filter.

        Args:
            pipeline_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all pipelines matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=pipeline_filter_model,
            route=PIPELINES,
            response_model=PipelineResponseModel,
        )

    @track(AnalyticsEvent.UPDATE_PIPELINE)
    def update_pipeline(
        self, pipeline_id: UUID, pipeline_update: PipelineUpdateModel
    ) -> PipelineResponseModel:
        """Apply an update to an existing pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The updated pipeline.
        """
        return self._update_resource(
            route=PIPELINES,
            resource_id=pipeline_id,
            resource_update=pipeline_update,
            response_model=PipelineResponseModel,
        )

    @track(AnalyticsEvent.DELETE_PIPELINE)
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Remove a pipeline from the server.

        Args:
            pipeline_id: The ID of the pipeline to delete.
        """
        self._delete_resource(route=PIPELINES, resource_id=pipeline_id)

    # ---------
    # Builds
    # ---------

    def create_build(
        self,
        build: PipelineBuildRequestModel,
    ) -> PipelineBuildResponseModel:
        """Register a new build in a workspace.

        Args:
            build: The build to create.

        Returns:
            The newly created build.
        """
        return self._create_workspace_scoped_resource(
            route=PIPELINE_BUILDS,
            resource=build,
            response_model=PipelineBuildResponseModel,
        )

    def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
        """Fetch a single build by ID.

        Args:
            build_id: ID of the build.

        Returns:
            The build.
        """
        return self._get_resource(
            route=PIPELINE_BUILDS,
            resource_id=build_id,
            response_model=PipelineBuildResponseModel,
        )

    def list_builds(
        self, build_filter_model: PipelineBuildFilterModel
    ) -> Page[PipelineBuildResponseModel]:
        """Retrieve a page of builds matching the given filter.

        Args:
            build_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all builds matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=build_filter_model,
            route=PIPELINE_BUILDS,
            response_model=PipelineBuildResponseModel,
        )

    def delete_build(self, build_id: UUID) -> None:
        """Remove a build from the server.

        Args:
            build_id: The ID of the build to delete.
        """
        self._delete_resource(route=PIPELINE_BUILDS, resource_id=build_id)

    # ----------------------
    # Pipeline Deployments
    # ----------------------

    def create_deployment(
        self,
        deployment: PipelineDeploymentRequestModel,
    ) -> PipelineDeploymentResponseModel:
        """Register a new deployment in a workspace.

        Args:
            deployment: The deployment to create.

        Returns:
            The newly created deployment.
        """
        return self._create_workspace_scoped_resource(
            route=PIPELINE_DEPLOYMENTS,
            resource=deployment,
            response_model=PipelineDeploymentResponseModel,
        )

    def get_deployment(
        self, deployment_id: UUID
    ) -> PipelineDeploymentResponseModel:
        """Fetch a single deployment by ID.

        Args:
            deployment_id: ID of the deployment.

        Returns:
            The deployment.
        """
        return self._get_resource(
            route=PIPELINE_DEPLOYMENTS,
            resource_id=deployment_id,
            response_model=PipelineDeploymentResponseModel,
        )

    def list_deployments(
        self, deployment_filter_model: PipelineDeploymentFilterModel
    ) -> Page[PipelineDeploymentResponseModel]:
        """Retrieve a page of deployments matching the given filter.

        Args:
            deployment_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all deployments matching the filter criteria.
        """
        return self._list_paginated_resources(
            filter_model=deployment_filter_model,
            route=PIPELINE_DEPLOYMENTS,
            response_model=PipelineDeploymentResponseModel,
        )

    def delete_deployment(self, deployment_id: UUID) -> None:
        """Remove a deployment from the server.

        Args:
            deployment_id: The ID of the deployment to delete.
        """
        self._delete_resource(
            route=PIPELINE_DEPLOYMENTS, resource_id=deployment_id
        )

    # ---------
    # Schedules
    # ---------

    def create_schedule(
        self, schedule: ScheduleRequestModel
    ) -> ScheduleResponseModel:
        """Register a new schedule.

        Args:
            schedule: The schedule to create.

        Returns:
            The newly created schedule.
        """
        return self._create_workspace_scoped_resource(
            route=SCHEDULES,
            resource=schedule,
            response_model=ScheduleResponseModel,
        )

    def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
        """Fetch a single schedule by ID.

        Args:
            schedule_id: ID of the schedule.

        Returns:
            The schedule.
        """
        return self._get_resource(
            route=SCHEDULES,
            resource_id=schedule_id,
            response_model=ScheduleResponseModel,
        )

    def list_schedules(
        self, schedule_filter_model: ScheduleFilterModel
    ) -> Page[ScheduleResponseModel]:
        """Retrieve a page of schedules matching the given filter.

        Args:
            schedule_filter_model: All filter parameters including pagination
                params

        Returns:
            A list of schedules.
        """
        return self._list_paginated_resources(
            filter_model=schedule_filter_model,
            route=SCHEDULES,
            response_model=ScheduleResponseModel,
        )

    def update_schedule(
        self,
        schedule_id: UUID,
        schedule_update: ScheduleUpdateModel,
    ) -> ScheduleResponseModel:
        """Apply an update to an existing schedule.

        Args:
            schedule_id: The ID of the schedule to be updated.
            schedule_update: The update to be applied.

        Returns:
            The updated schedule.
        """
        return self._update_resource(
            route=SCHEDULES,
            resource_id=schedule_id,
            resource_update=schedule_update,
            response_model=ScheduleResponseModel,
        )

    def delete_schedule(self, schedule_id: UUID) -> None:
        """Remove a schedule from the store.

        Args:
            schedule_id: ID of the schedule that should be removed.
        """
        self._delete_resource(
            route=SCHEDULES,
            resource_id=schedule_id,
        )

    # --------------
    # Pipeline runs
    # --------------

    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Register a new pipeline run.

        Args:
            pipeline_run: The pipeline run model to register.

        Returns:
            The registered pipeline run, as returned by the server.
        """
        return self._create_workspace_scoped_resource(
            route=RUNS,
            resource=pipeline_run,
            response_model=PipelineRunResponseModel,
        )

    def get_run(
        self, run_name_or_id: Union[UUID, str]
    ) -> PipelineRunResponseModel:
        """Fetch a pipeline run by name or ID.

        Args:
            run_name_or_id: Name or ID of the pipeline run to fetch.

        Returns:
            The requested pipeline run.
        """
        return self._get_resource(
            response_model=PipelineRunResponseModel,
            resource_id=run_name_or_id,
            route=RUNS,
        )

    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> Tuple[PipelineRunResponseModel, bool]:
        """Fetch an existing pipeline run, creating it if missing.

        A run matching the request's ID or name is returned as-is;
        otherwise a new run is created from the request.

        Args:
            pipeline_run: The pipeline run to fetch or create.

        Returns:
            A tuple of the pipeline run and a boolean indicating whether
            the run was created.
        """
        return self._get_or_create_workspace_scoped_resource(
            response_model=PipelineRunResponseModel,
            resource=pipeline_run,
            route=RUNS,
        )

    def list_runs(
        self, runs_filter_model: PipelineRunFilterModel
    ) -> Page[PipelineRunResponseModel]:
        """List pipeline runs matching the given filter.

        Args:
            runs_filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching pipeline runs.
        """
        return self._list_paginated_resources(
            filter_model=runs_filter_model,
            response_model=PipelineRunResponseModel,
            route=RUNS,
        )

    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Apply an update to a pipeline run.

        Args:
            run_id: ID of the pipeline run to update.
            run_update: The changes to apply.

        Returns:
            The pipeline run after the update.
        """
        return self._update_resource(
            route=RUNS,
            resource_id=run_id,
            resource_update=run_update,
            response_model=PipelineRunResponseModel,
        )

    def delete_run(self, run_id: UUID) -> None:
        """Remove a pipeline run from the store.

        Args:
            run_id: ID of the pipeline run that should be removed.
        """
        self._delete_resource(
            route=RUNS,
            resource_id=run_id,
        )

    # ------------------
    # Pipeline run steps
    # ------------------

    def create_run_step(
        self, step_run: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Register a new step run.

        Args:
            step_run: The step run model to register.

        Returns:
            The registered step run, as returned by the server.
        """
        return self._create_resource(
            route=STEPS,
            resource=step_run,
            response_model=StepRunResponseModel,
        )

    def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
        """Fetch a step run by ID.

        Args:
            step_run_id: ID of the step run to fetch.

        Returns:
            The requested step run.
        """
        return self._get_resource(
            response_model=StepRunResponseModel,
            resource_id=step_run_id,
            route=STEPS,
        )

    def list_run_steps(
        self, step_run_filter_model: StepRunFilterModel
    ) -> Page[StepRunResponseModel]:
        """List step runs matching the given filter.

        Args:
            step_run_filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching step runs.
        """
        return self._list_paginated_resources(
            filter_model=step_run_filter_model,
            response_model=StepRunResponseModel,
            route=STEPS,
        )

    def update_run_step(
        self,
        step_run_id: UUID,
        step_run_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Apply an update to a step run.

        Args:
            step_run_id: ID of the step run to update.
            step_run_update: The changes to apply.

        Returns:
            The step run after the update.
        """
        return self._update_resource(
            route=STEPS,
            resource_id=step_run_id,
            resource_update=step_run_update,
            response_model=StepRunResponseModel,
        )

    # ---------
    # Artifacts
    # ---------

    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Register a new artifact.

        Args:
            artifact: The artifact model to register.

        Returns:
            The registered artifact, as returned by the server.
        """
        return self._create_resource(
            route=ARTIFACTS,
            resource=artifact,
            response_model=ArtifactResponseModel,
        )

    def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
        """Fetch an artifact by ID.

        Args:
            artifact_id: ID of the artifact to fetch.

        Returns:
            The requested artifact.
        """
        return self._get_resource(
            response_model=ArtifactResponseModel,
            resource_id=artifact_id,
            route=ARTIFACTS,
        )

    def list_artifacts(
        self, artifact_filter_model: ArtifactFilterModel
    ) -> Page[ArtifactResponseModel]:
        """List artifacts matching the given filter.

        Args:
            artifact_filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching artifacts.
        """
        return self._list_paginated_resources(
            filter_model=artifact_filter_model,
            response_model=ArtifactResponseModel,
            route=ARTIFACTS,
        )

    def delete_artifact(self, artifact_id: UUID) -> None:
        """Remove an artifact from the store.

        Args:
            artifact_id: ID of the artifact that should be removed.
        """
        self._delete_resource(route=ARTIFACTS, resource_id=artifact_id)

    # ------------
    # Run Metadata
    # ------------

    def create_run_metadata(
        self, run_metadata: RunMetadataRequestModel
    ) -> RunMetadataResponseModel:
        """Register new run metadata.

        Args:
            run_metadata: The run metadata model to register.

        Returns:
            The registered run metadata, as returned by the server.
        """
        return self._create_workspace_scoped_resource(
            route=RUN_METADATA,
            resource=run_metadata,
            response_model=RunMetadataResponseModel,
        )

    def list_run_metadata(
        self,
        run_metadata_filter_model: RunMetadataFilterModel,
    ) -> Page[RunMetadataResponseModel]:
        """List run metadata matching the given filter.

        Args:
            run_metadata_filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching run metadata.
        """
        return self._list_paginated_resources(
            filter_model=run_metadata_filter_model,
            response_model=RunMetadataResponseModel,
            route=RUN_METADATA,
        )

    # -----------------
    # Code Repositories
    # -----------------

    def create_code_repository(
        self, code_repository: CodeRepositoryRequestModel
    ) -> CodeRepositoryResponseModel:
        """Register a new code repository.

        Args:
            code_repository: The code repository model to register.

        Returns:
            The registered code repository, as returned by the server.
        """
        return self._create_workspace_scoped_resource(
            route=CODE_REPOSITORIES,
            resource=code_repository,
            response_model=CodeRepositoryResponseModel,
        )

    def get_code_repository(
        self, code_repository_id: UUID
    ) -> CodeRepositoryResponseModel:
        """Fetch a code repository by ID.

        Args:
            code_repository_id: ID of the code repository to fetch.

        Returns:
            The requested code repository, if it was found.
        """
        return self._get_resource(
            response_model=CodeRepositoryResponseModel,
            resource_id=code_repository_id,
            route=CODE_REPOSITORIES,
        )

    def list_code_repositories(
        self, filter_model: CodeRepositoryFilterModel
    ) -> Page[CodeRepositoryResponseModel]:
        """List code repositories matching the given filter.

        Args:
            filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching code repositories.
        """
        return self._list_paginated_resources(
            filter_model=filter_model,
            response_model=CodeRepositoryResponseModel,
            route=CODE_REPOSITORIES,
        )

    def update_code_repository(
        self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
    ) -> CodeRepositoryResponseModel:
        """Apply an update to a code repository.

        Args:
            code_repository_id: ID of the code repository to update.
            update: The changes to apply.

        Returns:
            The code repository after the update.
        """
        return self._update_resource(
            route=CODE_REPOSITORIES,
            resource_id=code_repository_id,
            resource_update=update,
            response_model=CodeRepositoryResponseModel,
        )

    def delete_code_repository(self, code_repository_id: UUID) -> None:
        """Remove a code repository from the store.

        Args:
            code_repository_id: ID of the code repository that should be
                removed.
        """
        self._delete_resource(
            route=CODE_REPOSITORIES, resource_id=code_repository_id
        )

    # ------------------
    # Service Connectors
    # ------------------

    def _populate_connector_type(
        self,
        *connector_models: Union[
            ServiceConnectorResponseModel, ServiceConnectorResourcesModel
        ],
    ) -> None:
        """Populates or updates the connector type of the given connector or resource models.

        If the connector type is not locally available, the connector type
        field is left as is. The local and remote flags of the connector type
        are updated accordingly.

        Args:
            connector_models: The service connector or resource models to
                populate.
        """
        for service_connector in connector_models:
            # Mark the remote connector type as being only remotely available
            # (a non-string connector_type is a full model returned by the
            # server; a plain string is just a type identifier).
            if not isinstance(service_connector.connector_type, str):
                service_connector.connector_type.local = False
                service_connector.connector_type.remote = True

            # If the type isn't in the local connector registry, keep the
            # server-provided (or string) connector type untouched.
            if not service_connector_registry.is_registered(
                service_connector.type
            ):
                continue

            # Prefer the locally registered connector type, since it carries
            # the locally available implementation.
            connector_type = (
                service_connector_registry.get_service_connector_type(
                    service_connector.type
                )
            )
            connector_type.local = True
            # If the server also returned a full connector type model, the
            # type is available remotely as well.
            if not isinstance(service_connector.connector_type, str):
                connector_type.remote = True
            service_connector.connector_type = connector_type

    def create_service_connector(
        self, service_connector: ServiceConnectorRequestModel
    ) -> ServiceConnectorResponseModel:
        """Register a new service connector.

        Args:
            service_connector: The service connector model to register.

        Returns:
            The registered service connector, with its connector type
            resolved against the local registry where possible.
        """
        created = self._create_workspace_scoped_resource(
            response_model=ServiceConnectorResponseModel,
            resource=service_connector,
            route=SERVICE_CONNECTORS,
        )
        self._populate_connector_type(created)
        return created

    def get_service_connector(
        self, service_connector_id: UUID
    ) -> ServiceConnectorResponseModel:
        """Fetch a service connector by ID.

        Args:
            service_connector_id: ID of the service connector to fetch.

        Returns:
            The requested service connector, with its connector type
            resolved against the local registry where possible.
        """
        connector = self._get_resource(
            response_model=ServiceConnectorResponseModel,
            resource_id=service_connector_id,
            route=SERVICE_CONNECTORS,
            params={"expand_secrets": False},
        )
        self._populate_connector_type(connector)
        return connector

    def list_service_connectors(
        self, filter_model: ServiceConnectorFilterModel
    ) -> Page[ServiceConnectorResponseModel]:
        """List service connectors matching the given filter.

        Args:
            filter_model: Filter criteria, including pagination
                parameters.

        Returns:
            A page of matching service connectors, with connector types
            resolved against the local registry where possible.
        """
        page = self._list_paginated_resources(
            filter_model=filter_model,
            response_model=ServiceConnectorResponseModel,
            route=SERVICE_CONNECTORS,
            params={"expand_secrets": False},
        )
        self._populate_connector_type(*page.items)
        return page

    def update_service_connector(
        self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
    ) -> ServiceConnectorResponseModel:
        """Apply an update to a registered service connector.

        Fields set to None in the update model are generally left untouched,
        with the following exceptions:

        * the `configuration` and `secrets` fields together represent a full
        valid configuration update, not just a partial update. If either is
        set (i.e. not None) in the update, their merged values replace the
        existing configuration and secrets values entirely.
        * the `resource_id` field is a full replacement value: setting it to
        `None` removes the resource ID from the service connector.
        * the `expiration_seconds` field is a full replacement value: setting
        it to `None` removes the expiration from the service connector.
        * the `secret_id` field in the update is ignored, given that secrets
        are managed internally by the ZenML store.
        * the `labels` field is a full labels update: if set (i.e. not
        `None`), all existing labels are removed and replaced by the new
        labels in the update.

        Args:
            service_connector_id: ID of the service connector to update.
            update: The changes to apply.

        Returns:
            The service connector after the update, with its connector type
            resolved against the local registry where possible.
        """
        updated = self._update_resource(
            response_model=ServiceConnectorResponseModel,
            resource_id=service_connector_id,
            resource_update=update,
            route=SERVICE_CONNECTORS,
        )
        self._populate_connector_type(updated)
        return updated

    def delete_service_connector(self, service_connector_id: UUID) -> None:
        """Remove a service connector from the store.

        Args:
            service_connector_id: ID of the service connector that should be
                removed.
        """
        self._delete_resource(
            route=SERVICE_CONNECTORS, resource_id=service_connector_id
        )

    def verify_service_connector_config(
        self,
        service_connector: ServiceConnectorRequestModel,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Check whether a connector configuration can access its resources.

        Args:
            service_connector: The service connector configuration to verify.
            list_resources: If True, also return the list of all resources
                accessible through the service connector that match the
                supplied resource type and ID.

        Returns:
            The resources that the given configuration has access to, with
            the connector type resolved against the local registry where
            possible.
        """
        body = self.post(
            f"{SERVICE_CONNECTORS}{SERVICE_CONNECTOR_VERIFY}",
            params={"list_resources": list_resources},
            body=service_connector,
        )
        result = ServiceConnectorResourcesModel.parse_obj(body)
        self._populate_connector_type(result)
        return result

    def verify_service_connector(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Check whether a registered connector can access one or more resources.

        Args:
            service_connector_id: ID of the service connector to verify.
            resource_type: Restrict verification to this resource type.
            resource_id: Restrict verification to this resource ID.
            list_resources: If True, also return the list of all resources
                accessible through the service connector that match the
                supplied resource type and ID.

        Returns:
            The resources the connector has access to, scoped to the
            supplied resource type and ID when provided.
        """
        # Only forward the scoping filters that were actually provided.
        query: Dict[str, Any] = {"list_resources": list_resources}
        if resource_type:
            query["resource_type"] = resource_type
        if resource_id:
            query["resource_id"] = resource_id

        body = self.put(
            f"{SERVICE_CONNECTORS}/{service_connector_id}{SERVICE_CONNECTOR_VERIFY}",
            params=query,
        )
        result = ServiceConnectorResourcesModel.parse_obj(body)
        self._populate_connector_type(result)
        return result

    def get_service_connector_client(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> ServiceConnectorResponseModel:
        """Get a service connector client for a connector and given resource.

        Args:
            service_connector_id: ID of the base service connector to use.
            resource_type: The type of resource to get a client for.
            resource_id: The ID of the resource to get a client for.

        Returns:
            A service connector client that can be used to access the given
            resource, with its connector type resolved against the local
            registry where possible.
        """
        # Only forward the scoping filters that were actually provided.
        query: Dict[str, str] = {}
        if resource_type:
            query["resource_type"] = resource_type
        if resource_id:
            query["resource_id"] = resource_id

        body = self.get(
            f"{SERVICE_CONNECTORS}/{service_connector_id}{SERVICE_CONNECTOR_CLIENT}",
            params=query,
        )
        client_connector = ServiceConnectorResponseModel.parse_obj(body)
        self._populate_connector_type(client_connector)
        return client_connector

    def list_service_connector_resources(
        self,
        user_name_or_id: Union[str, UUID],
        workspace_name_or_id: Union[str, UUID],
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> List[ServiceConnectorResourcesModel]:
        """List resources that can be accessed by service connectors.

        Args:
            user_name_or_id: The name or ID of the user to scope to.
                NOTE(review): this argument is currently not used when
                building the request URL or query parameters — confirm
                whether it should be.
            workspace_name_or_id: The name or ID of the workspace to scope to.
            connector_type: The type of service connector to scope to.
            resource_type: The type of resource to scope to.
            resource_id: The ID of the resource to scope to.

        Returns:
            The matching list of resources that available service
            connectors have access to.
        """
        # Only forward filters that were actually provided.
        params = {}
        if connector_type:
            params["connector_type"] = connector_type
        if resource_type:
            params["resource_type"] = resource_type
        if resource_id:
            params["resource_id"] = resource_id
        response_body = self.get(
            f"{WORKSPACES}/{workspace_name_or_id}{SERVICE_CONNECTORS}{SERVICE_CONNECTOR_RESOURCES}",
            params=params,
        )

        assert isinstance(response_body, list)
        resource_list = [
            ServiceConnectorResourcesModel.parse_obj(item)
            for item in response_body
        ]

        # Resolve connector types against the local registry and update
        # their local/remote availability flags.
        self._populate_connector_type(*resource_list)

        # For service connectors with types that are only locally available,
        # we need to retrieve the resource list locally
        for idx, resources in enumerate(resource_list):
            if isinstance(resources.connector_type, str):
                # Skip connector types that are neither locally nor remotely
                # available
                continue
            if resources.connector_type.remote:
                # Skip connector types that are remotely available
                continue

            # Retrieve the resource list locally
            assert resources.id is not None
            connector = self.get_service_connector(resources.id)
            connector_instance = (
                service_connector_registry.instantiate_connector(
                    model=connector
                )
            )

            try:
                local_resources = connector_instance.verify(
                    resource_type=resource_type,
                    resource_id=resource_id,
                )
            except (ValueError, AuthorizationException) as e:
                # Best-effort: log the failure and keep the server-side
                # entry instead of failing the whole listing.
                logger.error(
                    f'Failed to fetch {resource_type or "available"} '
                    f"resources from service connector {connector.name}/"
                    f"{connector.id}: {e}"
                )
                continue

            # Replace the server-side entry with the locally verified one.
            resource_list[idx] = local_resources

        return resource_list

    def list_service_connector_types(
        self,
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        auth_method: Optional[str] = None,
    ) -> List[ServiceConnectorTypeModel]:
        """Get a list of service connector types.

        Merges the connector types known to the server with those in the
        local registry. A type present in both is reported once, using the
        local definition marked as also being available remotely.

        Args:
            connector_type: Filter by connector type.
            resource_type: Filter by resource type.
            auth_method: Filter by authentication method.

        Returns:
            List of service connector types.
        """
        # Only forward filters that were actually provided.
        params = {}
        if connector_type:
            params["connector_type"] = connector_type
        if resource_type:
            params["resource_type"] = resource_type
        if auth_method:
            params["auth_method"] = auth_method
        response_body = self.get(
            SERVICE_CONNECTOR_TYPES,
            params=params,
        )

        assert isinstance(response_body, list)
        remote_connector_types = [
            ServiceConnectorTypeModel.parse_obj(item) for item in response_body
        ]

        # Mark the remote connector types as being only remotely available
        for c in remote_connector_types:
            c.local = False
            c.remote = True

        # Apply the same filters to the local registry.
        local_connector_types = (
            service_connector_registry.list_service_connector_types(
                connector_type=connector_type,
                resource_type=resource_type,
                auth_method=auth_method,
            )
        )

        # Add the connector types in the local registry to the list of
        # connector types available remotely. Overwrite those that have
        # the same connector type but mark them as being remotely available.
        connector_types_map = {
            connector_type.connector_type: connector_type
            for connector_type in remote_connector_types
        }

        for connector in local_connector_types:
            if connector.connector_type in connector_types_map:
                connector.remote = True
            connector_types_map[connector.connector_type] = connector

        return list(connector_types_map.values())

    def get_service_connector_type(
        self,
        connector_type: str,
    ) -> ServiceConnectorTypeModel:
        """Returns the requested service connector type.

        Prefers the locally registered definition of the connector type when
        available, falling back to the server-side definition otherwise.

        Args:
            connector_type: the service connector type identifier.

        Returns:
            The requested service connector type.
        """
        # Use the local registry to get the service connector type, if it
        # exists.
        local_connector_type: Optional[ServiceConnectorTypeModel] = None
        if service_connector_registry.is_registered(connector_type):
            local_connector_type = (
                service_connector_registry.get_service_connector_type(
                    connector_type
                )
            )
        try:
            response_body = self.get(
                f"{SERVICE_CONNECTOR_TYPES}/{connector_type}",
            )
            remote_connector_type = ServiceConnectorTypeModel.parse_obj(
                response_body
            )
            if local_connector_type:
                # If locally available, return the local connector type but
                # mark it as being remotely available.
                local_connector_type.remote = True
                return local_connector_type

            # Mark the remote connector type as being only remotely available
            remote_connector_type.local = False
            remote_connector_type.remote = True

            return remote_connector_type
        except KeyError:
            # If the service connector type is not found, check the local
            # registry. (A KeyError here presumably comes from the REST
            # client's error translation of the server's not-found response
            # — TODO confirm.) The registry lookup itself raises if the
            # type is unknown locally too.
            return service_connector_registry.get_service_connector_type(
                connector_type
            )

    # =======================
    # Internal helper methods
    # =======================

    def _get_auth_token(self) -> str:
        """Get the authentication token for the REST store.

        The token is cached on the instance after the first retrieval.

        Returns:
            The authentication token.

        Raises:
            ValueError: if the response from the server isn't in the right
                format, or if neither an API token nor username/password
                credentials are available in the store configuration.
        """
        if self._api_token is None:
            # Check if the API token is already stored in the config
            if self.config.api_token:
                self._api_token = self.config.api_token
            # Check if the username and password are provided in the config
            elif (
                self.config.username is not None
                and self.config.password is not None
            ):
                # Exchange the credentials for a token via the login
                # endpoint. A direct requests.post is used here (not
                # self.session) since the session itself depends on this
                # token being available.
                response = self._handle_response(
                    requests.post(
                        self.url + API + VERSION_1 + LOGIN,
                        data={
                            "username": self.config.username,
                            "password": self.config.password,
                        },
                        verify=self.config.verify_ssl,
                        timeout=self.config.http_timeout,
                    )
                )
                if (
                    not isinstance(response, dict)
                    or "access_token" not in response
                ):
                    raise ValueError(
                        f"Bad API Response. Expected access token dict, got "
                        f"{type(response)}"
                    )
                self._api_token = response["access_token"]
                # Also store the token on the config.
                self.config.api_token = self._api_token
            else:
                raise ValueError(
                    "No API token or username/password provided. Please "
                    "provide either a token or a username and password in "
                    "the ZenStore config."
                )
        return self._api_token

    @property
    def session(self) -> requests.Session:
        """Requests session authenticated against the ZenML server.

        The session is created lazily on first access and cached on the
        instance afterwards.

        Returns:
            A requests session carrying the bearer authentication header.
        """
        if self._session is None:
            # Silence the insecure-request warnings when SSL verification
            # has been explicitly disabled in the config.
            if self.config.verify_ssl is False:
                urllib3.disable_warnings(
                    urllib3.exceptions.InsecureRequestWarning
                )

            self._session = requests.Session()
            self._session.verify = self.config.verify_ssl
            self._session.headers.update(
                {"Authorization": "Bearer " + self._get_auth_token()}
            )
            logger.debug("Authenticated to ZenML server.")
        return self._session

    @staticmethod
    def _handle_response(response: requests.Response) -> Json:
        """Handle API response, translating http status codes to Exception.

        Args:
            response: The response to handle.

        Returns:
            The parsed response.

        Raises:
            ValueError: if the response is not in the right format.
            RuntimeError: if an error response is received from the server
                and a more specific exception cannot be determined.
            exc: the exception converted from an error response, if one
                is returned from the server.
        """
        status = response.status_code

        # Success: return the parsed JSON body.
        if 200 <= status < 300:
            try:
                body: Json = response.json()
            except requests.exceptions.JSONDecodeError:
                raise ValueError(
                    "Bad response from API. Expected json, got\n"
                    f"{response.text}"
                )
            return body

        # Client/server error: translate to a specific exception when the
        # error translation layer recognizes the response.
        if status >= 400:
            exc = exception_from_response(response)
            if exc is None:
                raise RuntimeError(
                    f"{status} HTTP Error received from server: "
                    f"{response.text}"
                )
            raise exc

        # Anything else (3xx, 1xx) is unexpected here.
        raise RuntimeError(
            "Error retrieving from API. Got response "
            f"{status} with body:\n{response.text}"
        )

    def _request(
        self,
        method: str,
        url: str,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Make a request to the REST API.

        On an authentication failure the cached session is discarded and the
        request is retried once with a freshly authenticated session.

        Args:
            method: The HTTP method to use.
            url: The URL to request.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The parsed response.
        """
        # Stringify query parameter values up front so non-string values
        # (e.g. UUIDs, booleans) are serialized consistently.
        params = {k: str(v) for k, v in params.items()} if params else {}

        # Propagate the caller's source context to the server via a
        # request header.
        self.session.headers.update(
            {source_context.name: source_context.get().value}
        )

        try:
            return self._handle_response(
                self.session.request(
                    method,
                    url,
                    params=params,
                    verify=self.config.verify_ssl,
                    timeout=self.config.http_timeout,
                    **kwargs,
                )
            )
        except AuthorizationException:
            # The authentication token could have expired; refresh it and try
            # again. Resetting _session forces the `session` property to
            # re-authenticate on the next access.
            self._session = None
            return self._handle_response(
                self.session.request(
                    method,
                    url,
                    params=params,
                    verify=self.config.verify_ssl,
                    timeout=self.config.http_timeout,
                    **kwargs,
                )
            )

    def get(
        self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Json:
        """Make a GET request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending GET request to {path}...")
        # All endpoint paths are relative to the versioned API root.
        endpoint_url = self.url + API + VERSION_1 + path
        return self._request("GET", endpoint_url, params=params, **kwargs)

    def delete(
        self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Json:
        """Make a DELETE request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending DELETE request to {path}...")
        # All endpoint paths are relative to the versioned API root.
        endpoint_url = self.url + API + VERSION_1 + path
        return self._request("DELETE", endpoint_url, params=params, **kwargs)

    def post(
        self,
        path: str,
        body: BaseModel,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Make a POST request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            body: The body to send.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending POST request to {path}...")
        # All endpoint paths are relative to the versioned API root; the
        # request body is the JSON serialization of the pydantic model.
        endpoint_url = self.url + API + VERSION_1 + path
        return self._request(
            "POST", endpoint_url, data=body.json(), params=params, **kwargs
        )

    def put(
        self,
        path: str,
        body: Optional[BaseModel] = None,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Make a PUT request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            body: The body to send.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending PUT request to {path}...")
        # Only explicitly-set fields are serialized so that partial updates
        # don't overwrite server-side values with client defaults.
        payload = body.json(exclude_unset=True) if body else None
        endpoint_url = self.url + API + VERSION_1 + path
        return self._request(
            "PUT", endpoint_url, data=payload, params=params, **kwargs
        )

    def _create_resource(
        self,
        resource: BaseRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyResponseModel:
        """Create a new resource.

        Args:
            resource: The resource to create.
            response_model: Model to use to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource.
        """
        response_body = self.post(route, body=resource, params=params)
        return response_model.parse_obj(response_body)

    def _create_workspace_scoped_resource(
        self,
        resource: WorkspaceScopedRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyResponseModel:
        """Create a new workspace scoped resource.

        Args:
            resource: The resource to create.
            response_model: Model to use to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource.
        """
        # Prefix the route with the workspace the resource belongs to.
        scoped_route = f"{WORKSPACES}/{resource.workspace}{route}"
        return self._create_resource(
            resource=resource,
            response_model=response_model,
            route=scoped_route,
            params=params,
        )

    def _get_or_create_resource(
        self,
        resource: BaseRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> Tuple[AnyResponseModel, bool]:
        """Get or create a resource.

        Args:
            resource: The resource to get or create.
            response_model: Model to use to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource, and a boolean indicating whether the resource
            was created or not.

        Raises:
            ValueError: If the response body is not a list with 2 elements
                where the first element is the resource and the second element
                a boolean indicating whether the resource was created or not.
        """
        response_body = self.post(
            f"{route}{GET_OR_CREATE}",
            body=resource,
            params=params,
        )

        # The endpoint contract is a two-element list: [resource, created].
        # Validate each structural expectation with its own guard.
        if not isinstance(response_body, list):
            raise ValueError(
                f"Expected a list response from the {route}{GET_OR_CREATE} "
                f"endpoint but got {type(response_body)} instead."
            )

        if len(response_body) != 2:
            raise ValueError(
                f"Expected a list response with 2 elements from the "
                f"{route}{GET_OR_CREATE} endpoint but got {len(response_body)} "
                f"elements instead."
            )

        raw_model, created_flag = response_body
        if not isinstance(created_flag, bool):
            raise ValueError(
                f"Expected a boolean as the second element of the list "
                f"response from the {route}{GET_OR_CREATE} endpoint but got "
                f"{type(created_flag)} instead."
            )

        return response_model.parse_obj(raw_model), created_flag

    def _get_or_create_workspace_scoped_resource(
        self,
        resource: WorkspaceScopedRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> Tuple[AnyResponseModel, bool]:
        """Get or create a workspace scoped resource.

        Args:
            resource: The resource to get or create.
            response_model: Model to use to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource, and a boolean indicating whether the resource
            was created or not.
        """
        # Prefix the route with the workspace the resource belongs to.
        scoped_route = f"{WORKSPACES}/{resource.workspace}{route}"
        return self._get_or_create_resource(
            resource=resource,
            response_model=response_model,
            route=scoped_route,
            params=params,
        )

    def _get_resource(
        self,
        resource_id: Union[str, UUID],
        route: str,
        response_model: Type[AnyResponseModel],
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyResponseModel:
        """Retrieve a single resource.

        Args:
            resource_id: The ID of the resource to retrieve.
            route: The resource REST API route to use.
            response_model: Model to use to serialize the response body.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The retrieved resource.
        """
        response_body = self.get(f"{route}/{resource_id}", params=params)
        return response_model.parse_obj(response_body)

    def _list_paginated_resources(
        self,
        route: str,
        response_model: Type[AnyResponseModel],
        filter_model: BaseFilterModel,
        params: Optional[Dict[str, Any]] = None,
    ) -> Page[AnyResponseModel]:
        """Retrieve a list of resources filtered by some criteria.

        Args:
            route: The resource REST API route to use.
            response_model: Model to use to serialize the response body.
            filter_model: The filter model to use for the list query.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            List of retrieved resources matching the filter criteria.

        Raises:
            ValueError: If the value returned by the server is not a dict.
        """
        # Leave out filter params that are not supplied. Copy the caller's
        # dict so it isn't mutated as a side effect.
        params = dict(params or {})
        params.update(filter_model.dict(exclude_none=True))
        body = self.get(route, params=params)
        if not isinstance(body, dict):
            raise ValueError(
                f"Bad API Response. Expected dict, got {type(body)}"
            )
        # The initial page of items will be of type BaseResponseModel
        page_of_items: Page[AnyResponseModel] = Page.parse_obj(body)
        # So these items will be parsed into their correct types like here
        page_of_items.items = [
            response_model.parse_obj(generic_item)
            for generic_item in page_of_items.items
        ]
        return page_of_items

    def _list_resources(
        self,
        route: str,
        response_model: Type[AnyResponseModel],
        **filters: Any,
    ) -> List[AnyResponseModel]:
        """Retrieve a list of resources filtered by some criteria.

        Args:
            route: The resource REST API route to use.
            response_model: Model to use to serialize the response body.
            filters: Filter parameters to use in the query.

        Returns:
            List of retrieved resources matching the filter criteria.

        Raises:
            ValueError: If the value returned by the server is not a list.
        """
        # Leave out filter params that are not supplied.
        params = {key: value for key, value in filters.items() if value is not None}
        body = self.get(route, params=params)
        if not isinstance(body, list):
            raise ValueError(
                f"Bad API Response. Expected list, got {type(body)}"
            )
        return [response_model.parse_obj(entry) for entry in body]

    def _update_resource(
        self,
        resource_id: UUID,
        resource_update: BaseModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyResponseModel:
        """Update an existing resource.

        Args:
            resource_id: The id of the resource to update.
            resource_update: The resource update.
            response_model: Model to use to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The updated resource.
        """
        response_body = self.put(
            f"{route}/{resource_id}",
            body=resource_update,
            params=params,
        )
        return response_model.parse_obj(response_body)

    def _delete_resource(
        self, resource_id: Union[str, UUID], route: str
    ) -> None:
        """Delete a resource.

        Args:
            resource_id: The ID of the resource to delete.
            route: The resource REST API route to use.
        """
        self.delete(f"{route}/{resource_id}")
session: Session property readonly

Authenticate to the ZenML server.

Returns:

Type Description
Session

A requests session with the authentication token.

CONFIG_TYPE (StoreConfiguration) pydantic-model

REST ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The configuration of the secrets store to use. This defaults to a REST secrets store that extends the REST ZenML store.

username Optional[str]

The username to use to connect to the Zen server.

password Optional[str]

The password to use to connect to the Zen server.

verify_ssl Union[bool, str]

Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use or the CA bundle value itself.

http_timeout int

The timeout to use for all requests.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStoreConfiguration(StoreConfiguration):
    """REST ZenML store configuration.

    Attributes:
        type: The type of the store.
        secrets_store: The configuration of the secrets store to use.
            This defaults to a REST secrets store that extends the REST ZenML
            store.
        username: The username to use to connect to the Zen server.
        password: The password to use to connect to the Zen server.
        api_token: An API token to use instead of username/password.
        verify_ssl: Either a boolean, in which case it controls whether we
            verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use or the CA bundle value itself.
        http_timeout: The timeout to use for all requests.

    """

    type: StoreType = StoreType.REST

    secrets_store: Optional[SecretsStoreConfiguration] = None

    username: Optional[str] = None
    password: Optional[str] = None
    api_token: Optional[str] = None
    verify_ssl: Union[bool, str] = True
    http_timeout: int = DEFAULT_HTTP_TIMEOUT

    @validator("secrets_store")
    def validate_secrets_store(
        cls, secrets_store: Optional[SecretsStoreConfiguration]
    ) -> SecretsStoreConfiguration:
        """Ensures that the secrets store uses an associated REST secrets store.

        Args:
            secrets_store: The secrets store config to be validated.

        Returns:
            The validated secrets store config.

        Raises:
            ValueError: If the secrets store is not of type REST.
        """
        if secrets_store is None:
            secrets_store = RestSecretsStoreConfiguration()
        elif secrets_store.type != SecretsStoreType.REST:
            raise ValueError(
                "The secrets store associated with a REST zen store must be "
                f"of type REST, but is of type {secrets_store.type}."
            )

        return secrets_store

    @root_validator
    def validate_credentials(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validates the credentials provided in the values dictionary.

        Args:
            values: A dictionary containing the values to be validated.

        Raises:
            ValueError: If neither api_token nor username is set.

        Returns:
            The values dictionary.
        """
        # Check if the values dictionary contains either an api_token or a
        # username as non-empty strings.
        if values.get("api_token") or values.get("username"):
            return values
        else:
            raise ValueError(
                "Neither api_token nor username is set in the store config."
            )

    @validator("url")
    def validate_url(cls, url: str) -> str:
        """Validates that the URL is a well-formed REST store URL.

        Args:
            url: The URL to be validated.

        Returns:
            The validated URL without trailing slashes.

        Raises:
            ValueError: If the URL is not a well-formed REST store URL.
        """
        url = url.rstrip("/")
        scheme = re.search("^([a-z0-9]+://)", url)
        if scheme is None or scheme.group() not in ("https://", "http://"):
            # BUGFIX: this message was previously a plain string literal, so
            # `{url}` was never interpolated into the error text.
            raise ValueError(
                f"Invalid URL for REST store: {url}. Should be in the form "
                "https://hostname[:port] or http://hostname[:port]."
            )

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        return url

    @validator("verify_ssl")
    def validate_verify_ssl(
        cls, verify_ssl: Union[bool, str]
    ) -> Union[bool, str]:
        """Validates that the verify_ssl either points to a file or is a bool.

        Args:
            verify_ssl: The verify_ssl value to be validated.

        Returns:
            The validated verify_ssl value.
        """
        secret_folder = Path(
            GlobalConfiguration().local_stores_path,
            "certificates",
        )
        if isinstance(verify_ssl, bool) or verify_ssl.startswith(
            str(secret_folder)
        ):
            return verify_ssl

        # If the value is a path to an existing file, read the CA bundle
        # contents from it.
        if os.path.isfile(verify_ssl):
            with open(verify_ssl, "r") as f:
                verify_ssl = f.read()

        # Persist the CA bundle contents to a private local file and point
        # the config at that file.
        fileio.makedirs(str(secret_folder))
        file_path = Path(secret_folder, "ca_bundle.pem")
        with open(file_path, "w") as f:
            f.write(verify_ssl)
        file_path.chmod(0o600)
        verify_ssl = str(file_path)

        return verify_ssl

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return urlparse(url).scheme in ("http", "https")

    def expand_certificates(self) -> None:
        """Expands the certificates in the verify_ssl field."""
        # Load the certificate values back into the configuration
        if isinstance(self.verify_ssl, str) and os.path.isfile(
            self.verify_ssl
        ):
            with open(self.verify_ssl, "r") as f:
                self.verify_ssl = f.read()

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Create a copy of the store config using a different path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, RestZenStoreConfiguration)
        # Only a token-based config can be copied; username/password are
        # deliberately dropped from the copy.
        assert config.api_token is not None
        config = config.copy(exclude={"username", "password"}, deep=True)
        # Load the certificate values back into the configuration
        config.expand_certificates()
        return config

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the `verify_ssl` attribute can be expanded to the contents
        # of the certificate file.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/rest_zen_store.py
class Config:
    """Pydantic configuration class."""

    # Don't validate attributes when assigning them. This is necessary
    # because the `verify_ssl` attribute can be expanded to the contents
    # of the certificate file.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Create a copy of the store config using a different path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Create a copy of the store config using a different path.

    This method is used to create a copy of the store configuration that can
    be loaded using a different configuration path or in the context of a
    new environment, such as a container image.

    The configuration files accompanying the store configuration are also
    copied to the new configuration path (e.g. certificates etc.).

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the copied
            configuration. This can be set to a value different from
            `config_path` if the configuration copy will be loaded from
            a different environment, e.g. when the configuration is copied
            to a container image and loaded using a different absolute path.
            This will be reflected in the paths and URLs encoded in the
            copied configuration.

    Returns:
        A new store configuration object that reflects the new configuration
        path.
    """
    assert isinstance(config, RestZenStoreConfiguration)
    assert config.api_token is not None
    config = config.copy(exclude={"username", "password"}, deep=True)
    # Load the certificate values back into the configuration
    config.expand_certificates()
    return config
expand_certificates(self)

Expands the certificates in the verify_ssl field.

Source code in zenml/zen_stores/rest_zen_store.py
def expand_certificates(self) -> None:
    """Expands the certificates in the verify_ssl field."""
    # Load the certificate values back into the configuration
    if isinstance(self.verify_ssl, str) and os.path.isfile(
        self.verify_ssl
    ):
        with open(self.verify_ssl, "r") as f:
            self.verify_ssl = f.read()
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    return urlparse(url).scheme in ("http", "https")
validate_credentials(values) classmethod

Validates the credentials provided in the values dictionary.

Parameters:

Name Type Description Default
values Dict[str, Any]

A dictionary containing the values to be validated.

required

Exceptions:

Type Description
ValueError

If neither api_token nor username is set.

Returns:

Type Description
Dict[str, Any]

The values dictionary.

Source code in zenml/zen_stores/rest_zen_store.py
@root_validator
def validate_credentials(cls, values: Dict[str, Any]) -> Dict[str, Any]:
    """Validates the credentials provided in the values dictionary.

    Args:
        values: A dictionary containing the values to be validated.

    Raises:
        ValueError: If neither api_token nor username is set.

    Returns:
        The values dictionary.
    """
    # Check if the values dictionary contains either an api_token or a
    # username as non-empty strings.
    if values.get("api_token") or values.get("username"):
        return values
    else:
        raise ValueError(
            "Neither api_token nor username is set in the store config."
        )
validate_secrets_store(secrets_store) classmethod

Ensures that the secrets store uses an associated REST secrets store.

Parameters:

Name Type Description Default
secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The secrets store config to be validated.

required

Returns:

Type Description
SecretsStoreConfiguration

The validated secrets store config.

Exceptions:

Type Description
ValueError

If the secrets store is not of type REST.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("secrets_store")
def validate_secrets_store(
    cls, secrets_store: Optional[SecretsStoreConfiguration]
) -> SecretsStoreConfiguration:
    """Ensures that the secrets store uses an associated REST secrets store.

    Args:
        secrets_store: The secrets store config to be validated.

    Returns:
        The validated secrets store config.

    Raises:
        ValueError: If the secrets store is not of type REST.
    """
    if secrets_store is None:
        secrets_store = RestSecretsStoreConfiguration()
    elif secrets_store.type != SecretsStoreType.REST:
        raise ValueError(
            "The secrets store associated with a REST zen store must be "
            f"of type REST, but is of type {secrets_store.type}."
        )

    return secrets_store
validate_url(url) classmethod

Validates that the URL is a well-formed REST store URL.

Parameters:

Name Type Description Default
url str

The URL to be validated.

required

Returns:

Type Description
str

The validated URL without trailing slashes.

Exceptions:

Type Description
ValueError

If the URL is not a well-formed REST store URL.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("url")
def validate_url(cls, url: str) -> str:
    """Validates that the URL is a well-formed REST store URL.

    Args:
        url: The URL to be validated.

    Returns:
        The validated URL without trailing slashes.

    Raises:
        ValueError: If the URL is not a well-formed REST store URL.
    """
    url = url.rstrip("/")
    scheme = re.search("^([a-z0-9]+://)", url)
    if scheme is None or scheme.group() not in ("https://", "http://"):
        # BUGFIX: this message was previously a plain string literal, so
        # `{url}` was never interpolated into the error text.
        raise ValueError(
            f"Invalid URL for REST store: {url}. Should be in the form "
            "https://hostname[:port] or http://hostname[:port]."
        )

    # When running inside a container, if the URL uses localhost, the
    # target service will not be available. We try to replace localhost
    # with one of the special Docker or K3D internal hostnames.
    url = replace_localhost_with_internal_hostname(url)

    return url
validate_verify_ssl(verify_ssl) classmethod

Validates that the verify_ssl either points to a file or is a bool.

Parameters:

Name Type Description Default
verify_ssl Union[bool, str]

The verify_ssl value to be validated.

required

Returns:

Type Description
Union[bool, str]

The validated verify_ssl value.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("verify_ssl")
def validate_verify_ssl(
    cls, verify_ssl: Union[bool, str]
) -> Union[bool, str]:
    """Validates that the verify_ssl either points to a file or is a bool.

    Args:
        verify_ssl: The verify_ssl value to be validated.

    Returns:
        The validated verify_ssl value.
    """
    secret_folder = Path(
        GlobalConfiguration().local_stores_path,
        "certificates",
    )
    if isinstance(verify_ssl, bool) or verify_ssl.startswith(
        str(secret_folder)
    ):
        return verify_ssl

    if os.path.isfile(verify_ssl):
        with open(verify_ssl, "r") as f:
            verify_ssl = f.read()

    fileio.makedirs(str(secret_folder))
    file_path = Path(secret_folder, "ca_bundle.pem")
    with open(file_path, "w") as f:
        f.write(verify_ssl)
    file_path.chmod(0o600)
    verify_ssl = str(file_path)

    return verify_ssl
create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Source code in zenml/zen_stores/rest_zen_store.py
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.
    """
    # Plain (non-workspace-scoped) resource creation on the artifacts route.
    return self._create_resource(
        route=ARTIFACTS,
        response_model=ArtifactResponseModel,
        resource=artifact,
    )
create_build(self, build)

Creates a new build in a workspace.

Parameters:

Name Type Description Default
build PipelineBuildRequestModel

The build to create.

required

Returns:

Type Description
PipelineBuildResponseModel

The newly created build.

Source code in zenml/zen_stores/rest_zen_store.py
def create_build(
    self,
    build: PipelineBuildRequestModel,
) -> PipelineBuildResponseModel:
    """Creates a new build in a workspace.

    Args:
        build: The build to create.

    Returns:
        The newly created build.
    """
    # Builds are scoped to a workspace, so go through the scoped helper.
    return self._create_workspace_scoped_resource(
        response_model=PipelineBuildResponseModel,
        resource=build,
        route=PIPELINE_BUILDS,
    )
create_code_repository(self, code_repository)

Creates a new code repository.

Parameters:

Name Type Description Default
code_repository CodeRepositoryRequestModel

Code repository to be created.

required

Returns:

Type Description
CodeRepositoryResponseModel

The newly created code repository.

Source code in zenml/zen_stores/rest_zen_store.py
def create_code_repository(
    self, code_repository: CodeRepositoryRequestModel
) -> CodeRepositoryResponseModel:
    """Creates a new code repository.

    Args:
        code_repository: Code repository to be created.

    Returns:
        The newly created code repository.
    """
    # Code repositories live inside a workspace; use the scoped helper.
    return self._create_workspace_scoped_resource(
        route=CODE_REPOSITORIES,
        resource=code_repository,
        response_model=CodeRepositoryResponseModel,
    )
create_deployment(self, deployment)

Creates a new deployment in a workspace.

Parameters:

Name Type Description Default
deployment PipelineDeploymentRequestModel

The deployment to create.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The newly created deployment.

Source code in zenml/zen_stores/rest_zen_store.py
def create_deployment(
    self,
    deployment: PipelineDeploymentRequestModel,
) -> PipelineDeploymentResponseModel:
    """Creates a new deployment in a workspace.

    Args:
        deployment: The deployment to create.

    Returns:
        The newly created deployment.
    """
    # Deployments are workspace-scoped resources.
    return self._create_workspace_scoped_resource(
        response_model=PipelineDeploymentResponseModel,
        route=PIPELINE_DEPLOYMENTS,
        resource=deployment,
    )
create_flavor(*args, **kwargs)

Creates a new stack component flavor.

Parameters:

Name Type Description Default
flavor

The stack component flavor to create.

required

Returns:

Type Description
Any

The newly created flavor.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_pipeline(*args, **kwargs)

Creates a new pipeline in a workspace.

Parameters:

Name Type Description Default
pipeline

The pipeline to create.

required

Returns:

Type Description
Any

The newly created pipeline.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_role(*args, **kwargs)

Creates a new role.

Parameters:

Name Type Description Default
role

The role model to create.

required

Returns:

Type Description
Any

The newly created role.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.
    """
    # Pipeline runs are created within the scope of a workspace.
    return self._create_workspace_scoped_resource(
        route=RUNS,
        resource=pipeline_run,
        response_model=PipelineRunResponseModel,
    )
create_run_metadata(self, run_metadata)

Creates run metadata.

Parameters:

Name Type Description Default
run_metadata RunMetadataRequestModel

The run metadata to create.

required

Returns:

Type Description
RunMetadataResponseModel

The created run metadata.

Source code in zenml/zen_stores/rest_zen_store.py
def create_run_metadata(
    self, run_metadata: RunMetadataRequestModel
) -> RunMetadataResponseModel:
    """Creates run metadata.

    Args:
        run_metadata: The run metadata to create.

    Returns:
        The created run metadata.
    """
    # Run metadata is a workspace-scoped resource.
    return self._create_workspace_scoped_resource(
        route=RUN_METADATA,
        resource=run_metadata,
        response_model=RunMetadataResponseModel,
    )
create_run_step(self, step_run)

Creates a step run.

Parameters:

Name Type Description Default
step_run StepRunRequestModel

The step run to create.

required

Returns:

Type Description
StepRunResponseModel

The created step run.

Source code in zenml/zen_stores/rest_zen_store.py
def create_run_step(
    self, step_run: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step run.

    Args:
        step_run: The step run to create.

    Returns:
        The created step run.
    """
    # Step runs use the plain (non-workspace-scoped) creation helper.
    return self._create_resource(
        route=STEPS,
        resource=step_run,
        response_model=StepRunResponseModel,
    )
create_schedule(self, schedule)

Creates a new schedule.

Parameters:

Name Type Description Default
schedule ScheduleRequestModel

The schedule to create.

required

Returns:

Type Description
ScheduleResponseModel

The newly created schedule.

Source code in zenml/zen_stores/rest_zen_store.py
def create_schedule(
    self, schedule: ScheduleRequestModel
) -> ScheduleResponseModel:
    """Creates a new schedule.

    Args:
        schedule: The schedule to create.

    Returns:
        The newly created schedule.
    """
    # Schedules are created within the scope of a workspace.
    return self._create_workspace_scoped_resource(
        response_model=ScheduleResponseModel,
        route=SCHEDULES,
        resource=schedule,
    )
create_service_connector(self, service_connector)

Creates a new service connector.

Parameters:

Name Type Description Default
service_connector ServiceConnectorRequestModel

Service connector to be created.

required

Returns:

Type Description
ServiceConnectorResponseModel

The newly created service connector.

Source code in zenml/zen_stores/rest_zen_store.py
def create_service_connector(
    self, service_connector: ServiceConnectorRequestModel
) -> ServiceConnectorResponseModel:
    """Creates a new service connector.

    Args:
        service_connector: Service connector to be created.

    Returns:
        The newly created service connector.
    """
    # Register the connector first, then enrich the response in place with
    # its full connector type information before returning it.
    created = self._create_workspace_scoped_resource(
        resource=service_connector,
        response_model=ServiceConnectorResponseModel,
        route=SERVICE_CONNECTORS,
    )
    self._populate_connector_type(created)
    return created
create_stack(*args, **kwargs)

Register a new stack.

Parameters:

Name Type Description Default
stack

The stack to register.

required

Returns:

Type Description
Any

The registered stack.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_stack_component(*args, **kwargs)

Create a stack component.

Parameters:

Name Type Description Default
component

The stack component to create.

required

Returns:

Type Description
Any

The created stack component.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_team(*args, **kwargs)

Creates a new team.

Parameters:

Name Type Description Default
team

The team model to create.

required

Returns:

Type Description
Any

The newly created team.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_team_role_assignment(self, team_role_assignment)

Creates a new team role assignment.

Parameters:

Name Type Description Default
team_role_assignment TeamRoleAssignmentRequestModel

The role assignment model to create.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def create_team_role_assignment(
    self, team_role_assignment: TeamRoleAssignmentRequestModel
) -> TeamRoleAssignmentResponseModel:
    """Creates a new team role assignment.

    Args:
        team_role_assignment: The role assignment model to create.

    Returns:
        The newly created role assignment.
    """
    # Team role assignments use the plain resource creation helper.
    return self._create_resource(
        response_model=TeamRoleAssignmentResponseModel,
        route=TEAM_ROLE_ASSIGNMENTS,
        resource=team_role_assignment,
    )
create_user(*args, **kwargs)

Creates a new user.

Parameters:

Name Type Description Default
user

User to be created.

required

Returns:

Type Description
Any

The newly created user.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
create_user_role_assignment(self, user_role_assignment)

Creates a new role assignment.

Parameters:

Name Type Description Default
user_role_assignment UserRoleAssignmentRequestModel

The role assignment to create.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def create_user_role_assignment(
    self, user_role_assignment: UserRoleAssignmentRequestModel
) -> UserRoleAssignmentResponseModel:
    """Creates a new role assignment.

    Args:
        user_role_assignment: The role assignment to create.

    Returns:
        The newly created role assignment.
    """
    # User role assignments use the plain (non-workspace-scoped) helper.
    return self._create_resource(
        resource=user_role_assignment,
        route=USER_ROLE_ASSIGNMENTS,
        response_model=UserRoleAssignmentResponseModel,
    )
create_workspace(*args, **kwargs)

Creates a new workspace.

Parameters:

Name Type Description Default
workspace

The workspace to create.

required

Returns:

Type Description
Any

The newly created workspace.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete(self, path, params=None, **kwargs)

Make a DELETE request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def delete(
    self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> Json:
    """Make a DELETE request to the given endpoint path.

    Args:
        path: The path to the endpoint.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments to pass to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending DELETE request to {path}...")
    # Build the fully qualified, versioned API URL for the endpoint.
    endpoint = self.url + API + VERSION_1 + path
    return self._request("DELETE", endpoint, params=params, **kwargs)
delete_artifact(self, artifact_id)

Deletes an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_artifact(self, artifact_id: UUID) -> None:
    """Deletes an artifact.

    Args:
        artifact_id: The ID of the artifact to delete.
    """
    # Remove the artifact identified by its UUID from the artifacts route.
    self._delete_resource(route=ARTIFACTS, resource_id=artifact_id)
delete_build(self, build_id)

Deletes a build.

Parameters:

Name Type Description Default
build_id UUID

The ID of the build to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_build(self, build_id: UUID) -> None:
    """Deletes a build.

    Args:
        build_id: The ID of the build to delete.
    """
    # Remove the pipeline build identified by its UUID.
    self._delete_resource(route=PIPELINE_BUILDS, resource_id=build_id)
delete_code_repository(self, code_repository_id)

Deletes a code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_code_repository(self, code_repository_id: UUID) -> None:
    """Deletes a code repository.

    Args:
        code_repository_id: The ID of the code repository to delete.
    """
    # Remove the code repository identified by its UUID.
    self._delete_resource(
        route=CODE_REPOSITORIES,
        resource_id=code_repository_id,
    )
delete_deployment(self, deployment_id)

Deletes a deployment.

Parameters:

Name Type Description Default
deployment_id UUID

The ID of the deployment to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_deployment(self, deployment_id: UUID) -> None:
    """Deletes a deployment.

    Args:
        deployment_id: The ID of the deployment to delete.
    """
    # Remove the pipeline deployment identified by its UUID.
    self._delete_resource(
        route=PIPELINE_DEPLOYMENTS, resource_id=deployment_id
    )
delete_flavor(*args, **kwargs)

Delete a stack component flavor.

Parameters:

Name Type Description Default
flavor_id

The ID of the stack component flavor to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_pipeline(*args, **kwargs)

Deletes a pipeline.

Parameters:

Name Type Description Default
pipeline_id

The ID of the pipeline to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_role(*args, **kwargs)

Deletes a role.

Parameters:

Name Type Description Default
role_name_or_id

Name or ID of the role to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_run(self, run_id)

Deletes a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_run(self, run_id: UUID) -> None:
    """Deletes a pipeline run.

    Args:
        run_id: The ID of the pipeline run to delete.
    """
    # Remove the pipeline run identified by its UUID.
    self._delete_resource(route=RUNS, resource_id=run_id)
delete_schedule(self, schedule_id)

Deletes a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_schedule(self, schedule_id: UUID) -> None:
    """Deletes a schedule.

    Args:
        schedule_id: The ID of the schedule to delete.
    """
    # Remove the schedule identified by its UUID.
    self._delete_resource(route=SCHEDULES, resource_id=schedule_id)
delete_service_connector(self, service_connector_id)

Deletes a service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_service_connector(self, service_connector_id: UUID) -> None:
    """Deletes a service connector.

    Args:
        service_connector_id: The ID of the service connector to delete.
    """
    # Remove the service connector identified by its UUID.
    self._delete_resource(
        route=SERVICE_CONNECTORS,
        resource_id=service_connector_id,
    )
delete_stack(*args, **kwargs)

Delete a stack.

Parameters:

Name Type Description Default
stack_id

The ID of the stack to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_stack_component(*args, **kwargs)

Delete a stack component.

Parameters:

Name Type Description Default
component_id

The ID of the stack component to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_team(*args, **kwargs)

Deletes a team.

Parameters:

Name Type Description Default
team_name_or_id

Name or ID of the team to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that collects analytics metadata around the wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: failures are logged
        # at debug level and must never affect the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata derived from the result when it is tracked.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_team_role_assignment(self, team_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

The ID of the specific role assignment

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> None:
    """Delete a specific role assignment.

    Args:
        team_role_assignment_id: The ID of the specific role assignment
    """
    # Remove the team role assignment identified by its UUID.
    self._delete_resource(
        route=TEAM_ROLE_ASSIGNMENTS,
        resource_id=team_role_assignment_id,
    )
delete_user(*args, **kwargs)

Deletes a user.

Parameters:

Name Type Description Default
user_name_or_id

The name or ID of the user to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function inside an analytics event handler.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics bookkeeping is best-effort: never let it break the call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_user_role_assignment(self, user_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

The ID of the specific role assignment

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> None:
    """Remove a user role assignment given its ID.

    Args:
        user_role_assignment_id: The ID of the specific role assignment
    """
    self._delete_resource(
        route=USER_ROLE_ASSIGNMENTS,
        resource_id=user_role_assignment_id,
    )
delete_workspace(*args, **kwargs)

Deletes a workspace.

Parameters:

Name Type Description Default
workspace_name_or_id

Name or ID of the workspace to delete.

required
Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function inside an analytics event handler.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics bookkeeping is best-effort: never let it break the call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get(self, path, params=None, **kwargs)

Make a GET request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def get(
    self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> Json:
    """Issue a GET request against the given endpoint path.

    Args:
        path: The path to the endpoint.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments to pass to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending GET request to {path}...")
    # Prefix the endpoint path with the server URL and API version.
    url = self.url + API + VERSION_1 + path
    return self._request("GET", url, params=params, **kwargs)
get_artifact(self, artifact_id)

Gets an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to get.

required

Returns:

Type Description
ArtifactResponseModel

The artifact.

Source code in zenml/zen_stores/rest_zen_store.py
def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
    """Fetch a single artifact by its ID.

    Args:
        artifact_id: The ID of the artifact to get.

    Returns:
        The artifact.
    """
    artifact = self._get_resource(
        route=ARTIFACTS,
        resource_id=artifact_id,
        response_model=ArtifactResponseModel,
    )
    return artifact
get_auth_user(self, user_name_or_id)

Gets the auth model to a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Exceptions:

Type Description
NotImplementedError

This method is only available for the SQLZenStore.

Source code in zenml/zen_stores/rest_zen_store.py
def get_auth_user(
    self, user_name_or_id: Union[str, UUID]
) -> "UserAuthModel":
    """Gets the auth model to a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Raises:
        NotImplementedError: This method is only available for the
            SQLZenStore.
    """
    # Auth models are server-internal; the REST client never handles them.
    message = (
        "This method is only designed for use"
        " by the server endpoints. It is not designed"
        " to be called from the client side."
    )
    raise NotImplementedError(message)
get_build(self, build_id)

Get a build with a given ID.

Parameters:

Name Type Description Default
build_id UUID

ID of the build.

required

Returns:

Type Description
PipelineBuildResponseModel

The build.

Source code in zenml/zen_stores/rest_zen_store.py
def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
    """Fetch a single pipeline build by its ID.

    Args:
        build_id: ID of the build.

    Returns:
        The build.
    """
    build = self._get_resource(
        route=PIPELINE_BUILDS,
        resource_id=build_id,
        response_model=PipelineBuildResponseModel,
    )
    return build
get_code_repository(self, code_repository_id)

Gets a specific code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to get.

required

Returns:

Type Description
CodeRepositoryResponseModel

The requested code repository, if it was found.

Source code in zenml/zen_stores/rest_zen_store.py
def get_code_repository(
    self, code_repository_id: UUID
) -> CodeRepositoryResponseModel:
    """Fetch a single code repository by its ID.

    Args:
        code_repository_id: The ID of the code repository to get.

    Returns:
        The requested code repository, if it was found.
    """
    repository = self._get_resource(
        route=CODE_REPOSITORIES,
        resource_id=code_repository_id,
        response_model=CodeRepositoryResponseModel,
    )
    return repository
get_deployment(self, deployment_id)

Get a deployment with a given ID.

Parameters:

Name Type Description Default
deployment_id UUID

ID of the deployment.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The deployment.

Source code in zenml/zen_stores/rest_zen_store.py
def get_deployment(
    self, deployment_id: UUID
) -> PipelineDeploymentResponseModel:
    """Fetch a single pipeline deployment by its ID.

    Args:
        deployment_id: ID of the deployment.

    Returns:
        The deployment.
    """
    deployment = self._get_resource(
        route=PIPELINE_DEPLOYMENTS,
        resource_id=deployment_id,
        response_model=PipelineDeploymentResponseModel,
    )
    return deployment
get_flavor(self, flavor_id)

Get a stack component flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the stack component flavor to get.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Source code in zenml/zen_stores/rest_zen_store.py
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Fetch a single stack component flavor by its ID.

    Args:
        flavor_id: The ID of the stack component flavor to get.

    Returns:
        The stack component flavor.
    """
    flavor = self._get_resource(
        route=FLAVORS,
        resource_id=flavor_id,
        response_model=FlavorResponseModel,
    )
    return flavor
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
Tuple[zenml.models.pipeline_run_models.PipelineRunResponseModel, bool]

The pipeline run, and a boolean indicating whether the run was created or not.

Source code in zenml/zen_stores/rest_zen_store.py
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> Tuple[PipelineRunResponseModel, bool]:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run, and a boolean indicating whether the run was
        created or not.
    """
    run_and_created_flag = self._get_or_create_workspace_scoped_resource(
        route=RUNS,
        resource=pipeline_run,
        response_model=PipelineRunResponseModel,
    )
    return run_and_created_flag
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Source code in zenml/zen_stores/rest_zen_store.py
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Fetch a single pipeline by its ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.
    """
    pipeline = self._get_resource(
        route=PIPELINES,
        resource_id=pipeline_id,
        response_model=PipelineResponseModel,
    )
    return pipeline
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Source code in zenml/zen_stores/rest_zen_store.py
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Fetch a single role by name or ID.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.
    """
    role = self._get_resource(
        route=ROLES,
        resource_id=role_name_or_id,
        response_model=RoleResponseModel,
    )
    return role
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[uuid.UUID, str]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def get_run(
    self, run_name_or_id: Union[UUID, str]
) -> PipelineRunResponseModel:
    """Fetch a single pipeline run by name or ID.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.
    """
    run = self._get_resource(
        route=RUNS,
        resource_id=run_name_or_id,
        response_model=PipelineRunResponseModel,
    )
    return run
get_run_step(self, step_run_id)

Get a step run by ID.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step run to get.

required

Returns:

Type Description
StepRunResponseModel

The step run.

Source code in zenml/zen_stores/rest_zen_store.py
def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
    """Fetch a single step run by its ID.

    Args:
        step_run_id: The ID of the step run to get.

    Returns:
        The step run.
    """
    step_run = self._get_resource(
        route=STEPS,
        resource_id=step_run_id,
        response_model=StepRunResponseModel,
    )
    return step_run
get_schedule(self, schedule_id)

Get a schedule with a given ID.

Parameters:

Name Type Description Default
schedule_id UUID

ID of the schedule.

required

Returns:

Type Description
ScheduleResponseModel

The schedule.

Source code in zenml/zen_stores/rest_zen_store.py
def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
    """Fetch a single schedule by its ID.

    Args:
        schedule_id: ID of the schedule.

    Returns:
        The schedule.
    """
    schedule = self._get_resource(
        route=SCHEDULES,
        resource_id=schedule_id,
        response_model=ScheduleResponseModel,
    )
    return schedule
get_service_connector(self, service_connector_id)

Gets a specific service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to get.

required

Returns:

Type Description
ServiceConnectorResponseModel

The requested service connector, if it was found.

Source code in zenml/zen_stores/rest_zen_store.py
def get_service_connector(
    self, service_connector_id: UUID
) -> ServiceConnectorResponseModel:
    """Fetch a single service connector by its ID.

    Args:
        service_connector_id: The ID of the service connector to get.

    Returns:
        The requested service connector, if it was found.
    """
    # Secrets are not expanded here; only the connector metadata is fetched.
    model = self._get_resource(
        route=SERVICE_CONNECTORS,
        resource_id=service_connector_id,
        response_model=ServiceConnectorResponseModel,
        params={"expand_secrets": False},
    )
    # Enrich the response with the connector type information.
    self._populate_connector_type(model)
    return model
get_service_connector_client(self, service_connector_id, resource_type=None, resource_id=None)

Get a service connector client for a service connector and given resource.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the base service connector to use.

required
resource_type Optional[str]

The type of resource to get a client for.

None
resource_id Optional[str]

The ID of the resource to get a client for.

None

Returns:

Type Description
ServiceConnectorResponseModel

A service connector client that can be used to access the given resource.

Source code in zenml/zen_stores/rest_zen_store.py
def get_service_connector_client(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> ServiceConnectorResponseModel:
    """Get a service connector client for a service connector and given resource.

    Args:
        service_connector_id: The ID of the base service connector to use.
        resource_type: The type of resource to get a client for.
        resource_id: The ID of the resource to get a client for.

    Returns:
        A service connector client that can be used to access the given
        resource.
    """
    # Only forward the query parameters that were actually supplied.
    query = {
        key: value
        for key, value in (
            ("resource_type", resource_type),
            ("resource_id", resource_id),
        )
        if value
    }
    response_body = self.get(
        f"{SERVICE_CONNECTORS}/{str(service_connector_id)}{SERVICE_CONNECTOR_CLIENT}",
        params=query,
    )

    connector = ServiceConnectorResponseModel.parse_obj(response_body)
    self._populate_connector_type(connector)
    return connector
get_service_connector_type(self, connector_type)

Returns the requested service connector type.

Parameters:

Name Type Description Default
connector_type str

the service connector type identifier.

required

Returns:

Type Description
ServiceConnectorTypeModel

The requested service connector type.

Source code in zenml/zen_stores/rest_zen_store.py
def get_service_connector_type(
    self,
    connector_type: str,
) -> ServiceConnectorTypeModel:
    """Returns the requested service connector type.

    Args:
        connector_type: the service connector type identifier.

    Returns:
        The requested service connector type.
    """
    # Resolve the connector type from the local registry first, if it
    # is registered there.
    local_type: Optional[ServiceConnectorTypeModel] = None
    if service_connector_registry.is_registered(connector_type):
        local_type = (
            service_connector_registry.get_service_connector_type(
                connector_type
            )
        )
    try:
        response_body = self.get(
            f"{SERVICE_CONNECTOR_TYPES}/{connector_type}",
        )
        remote_type = ServiceConnectorTypeModel.parse_obj(response_body)
        if local_type:
            # Prefer the locally available implementation, but record
            # that the server knows about it as well.
            local_type.remote = True
            return local_type

        # Only the server knows about this connector type.
        remote_type.local = False
        remote_type.remote = True

        return remote_type
    except KeyError:
        # The server does not know the type; fall back to the local
        # registry (which raises if the type is unknown there too).
        return service_connector_registry.get_service_connector_type(
            connector_type
        )
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Source code in zenml/zen_stores/rest_zen_store.py
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Fetch a single stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.
    """
    stack = self._get_resource(
        route=STACKS,
        resource_id=stack_id,
        response_model=StackResponseModel,
    )
    return stack
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Source code in zenml/zen_stores/rest_zen_store.py
def get_stack_component(
    self, component_id: UUID
) -> ComponentResponseModel:
    """Fetch a single stack component by its ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.
    """
    component = self._get_resource(
        route=STACK_COMPONENTS,
        resource_id=component_id,
        response_model=ComponentResponseModel,
    )
    return component
get_store_info(self)

Get information about the server.

Returns:

Type Description
ServerModel

Information about the server.

Source code in zenml/zen_stores/rest_zen_store.py
def get_store_info(self) -> ServerModel:
    """Get information about the server.

    Returns:
        Information about the server.
    """
    # The INFO endpoint returns the server's metadata as a JSON body.
    return ServerModel.parse_obj(self.get(INFO))
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Source code in zenml/zen_stores/rest_zen_store.py
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Fetch a single team by name or ID.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.
    """
    team = self._get_resource(
        route=TEAMS,
        resource_id=team_name_or_id,
        response_model=TeamResponseModel,
    )
    return team
get_team_role_assignment(self, team_role_assignment_id)

Gets a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The requested role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def get_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> TeamRoleAssignmentResponseModel:
    """Fetch a single team role assignment by its ID.

    Args:
        team_role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.
    """
    assignment = self._get_resource(
        route=TEAM_ROLE_ASSIGNMENTS,
        resource_id=team_role_assignment_id,
        response_model=TeamRoleAssignmentResponseModel,
    )
    return assignment
get_user(self, user_name_or_id=None, include_private=False)

Gets a specific user, when no id is specified the active user is returned.

The include_private parameter is ignored here as it is handled implicitly by the /current-user endpoint that is queried when no user_name_or_id is set. Raises a KeyError in case a user with that id does not exist.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

None
include_private bool

Whether to include private user information

False

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Source code in zenml/zen_stores/rest_zen_store.py
def get_user(
    self,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    include_private: bool = False,
) -> UserResponseModel:
    """Gets a specific user, when no id is specified the active user is returned.

    The `include_private` parameter is ignored here as it is handled
    implicitly by the /current-user endpoint that is queried when no
    user_name_or_id is set. Raises a KeyError in case a user with that id
    does not exist.

    Args:
        user_name_or_id: The name or ID of the user to get.
        include_private: Whether to include private user information

    Returns:
        The requested user, if it was found.
    """
    if not user_name_or_id:
        # No identifier given: resolve the currently authenticated user.
        return UserResponseModel.parse_obj(self.get(CURRENT_USER))
    return self._get_resource(
        route=USERS,
        resource_id=user_name_or_id,
        response_model=UserResponseModel,
    )
get_user_role_assignment(self, user_role_assignment_id)

Get an existing role assignment by name or ID.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

Name or ID of the role assignment to get.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The requested role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def get_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> UserRoleAssignmentResponseModel:
    """Get an existing user role assignment by name or ID.

    Args:
        user_role_assignment_id: Name or ID of the role assignment to get.

    Returns:
        The requested role assignment.
    """
    return self._get_resource(
        resource_id=user_role_assignment_id,
        route=USER_ROLE_ASSIGNMENTS,
        response_model=UserRoleAssignmentResponseModel,
    )
get_workspace(self, workspace_name_or_id)

Get an existing workspace by name or ID.

Parameters:

Name Type Description Default
workspace_name_or_id Union[uuid.UUID, str]

Name or ID of the workspace to get.

required

Returns:

Type Description
WorkspaceResponseModel

The requested workspace.

Source code in zenml/zen_stores/rest_zen_store.py
def get_workspace(
    self, workspace_name_or_id: Union[UUID, str]
) -> WorkspaceResponseModel:
    """Fetch a single workspace by name or ID.

    Args:
        workspace_name_or_id: Name or ID of the workspace to get.

    Returns:
        The requested workspace.
    """
    workspace = self._get_resource(
        route=WORKSPACES,
        resource_id=workspace_name_or_id,
        response_model=WorkspaceResponseModel,
    )
    return workspace
list_artifacts(self, artifact_filter_model)

List all artifacts matching the given filter criteria.

Parameters:

Name Type Description Default
artifact_filter_model ArtifactFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ArtifactResponseModel]

A list of all artifacts matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_artifacts(
    self, artifact_filter_model: ArtifactFilterModel
) -> Page[ArtifactResponseModel]:
    """Return a page of artifacts matching the given filter criteria.

    Args:
        artifact_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all artifacts matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=ArtifactResponseModel,
        route=ARTIFACTS,
        filter_model=artifact_filter_model,
    )
    return page
list_builds(self, build_filter_model)

List all builds matching the given filter criteria.

Parameters:

Name Type Description Default
build_filter_model PipelineBuildFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineBuildResponseModel]

A page of all builds matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_builds(
    self, build_filter_model: PipelineBuildFilterModel
) -> Page[PipelineBuildResponseModel]:
    """Return a page of builds matching the given filter criteria.

    Args:
        build_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all builds matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=PipelineBuildResponseModel,
        route=PIPELINE_BUILDS,
        filter_model=build_filter_model,
    )
    return page
list_code_repositories(self, filter_model)

List all code repositories.

Parameters:

Name Type Description Default
filter_model CodeRepositoryFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[CodeRepositoryResponseModel]

A page of all code repositories.

Source code in zenml/zen_stores/rest_zen_store.py
def list_code_repositories(
    self, filter_model: CodeRepositoryFilterModel
) -> Page[CodeRepositoryResponseModel]:
    """Return a page of code repositories matching the filter.

    Args:
        filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all code repositories.
    """
    page = self._list_paginated_resources(
        response_model=CodeRepositoryResponseModel,
        route=CODE_REPOSITORIES,
        filter_model=filter_model,
    )
    return page
list_deployments(self, deployment_filter_model)

List all deployments matching the given filter criteria.

Parameters:

Name Type Description Default
deployment_filter_model PipelineDeploymentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineDeploymentResponseModel]

A page of all deployments matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_deployments(
    self, deployment_filter_model: PipelineDeploymentFilterModel
) -> Page[PipelineDeploymentResponseModel]:
    """Return a page of deployments matching the given filter criteria.

    Args:
        deployment_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all deployments matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=PipelineDeploymentResponseModel,
        route=PIPELINE_DEPLOYMENTS,
        filter_model=deployment_filter_model,
    )
    return page
list_flavors(self, flavor_filter_model)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
flavor_filter_model FlavorFilterModel

All filter parameters including pagination params

required

Returns:

Type Description
Page[FlavorResponseModel]

List of all the stack component flavors matching the given criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_flavors(
    self, flavor_filter_model: FlavorFilterModel
) -> Page[FlavorResponseModel]:
    """Return a page of stack component flavors matching the filter.

    Args:
        flavor_filter_model: All filter parameters including pagination
            params

    Returns:
        List of all the stack component flavors matching the given criteria.
    """
    page = self._list_paginated_resources(
        response_model=FlavorResponseModel,
        route=FLAVORS,
        filter_model=flavor_filter_model,
    )
    return page
list_pipelines(self, pipeline_filter_model)

List all pipelines matching the given filter criteria.

Parameters:

Name Type Description Default
pipeline_filter_model PipelineFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineResponseModel]

A list of all pipelines matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_pipelines(
    self, pipeline_filter_model: PipelineFilterModel
) -> Page[PipelineResponseModel]:
    """Return a page of pipelines matching the given filter criteria.

    Args:
        pipeline_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all pipelines matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=PipelineResponseModel,
        route=PIPELINES,
        filter_model=pipeline_filter_model,
    )
    return page
list_roles(self, role_filter_model)

List all roles matching the given filter criteria.

Parameters:

Name Type Description Default
role_filter_model RoleFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RoleResponseModel]

A list of all roles matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_roles(
    self, role_filter_model: RoleFilterModel
) -> Page[RoleResponseModel]:
    """Return a page of roles matching the given filter criteria.

    Args:
        role_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all roles matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=RoleResponseModel,
        route=ROLES,
        filter_model=role_filter_model,
    )
    return page
list_run_metadata(self, run_metadata_filter_model)

List run metadata.

Parameters:

Name Type Description Default
run_metadata_filter_model RunMetadataFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RunMetadataResponseModel]

The run metadata.

Source code in zenml/zen_stores/rest_zen_store.py
def list_run_metadata(
    self,
    run_metadata_filter_model: RunMetadataFilterModel,
) -> Page[RunMetadataResponseModel]:
    """Return a page of run metadata matching the filter.

    Args:
        run_metadata_filter_model: All filter parameters including
            pagination params.

    Returns:
        The run metadata.
    """
    page = self._list_paginated_resources(
        response_model=RunMetadataResponseModel,
        route=RUN_METADATA,
        filter_model=run_metadata_filter_model,
    )
    return page
list_run_steps(self, step_run_filter_model)

List all step runs matching the given filter criteria.

Parameters:

Name Type Description Default
step_run_filter_model StepRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[StepRunResponseModel]

A list of all step runs matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_run_steps(
    self, step_run_filter_model: StepRunFilterModel
) -> Page[StepRunResponseModel]:
    """Return a page of step runs matching the given filter criteria.

    Args:
        step_run_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all step runs matching the filter criteria.
    """
    page = self._list_paginated_resources(
        response_model=StepRunResponseModel,
        route=STEPS,
        filter_model=step_run_filter_model,
    )
    return page
list_runs(self, runs_filter_model)

List all pipeline runs matching the given filter criteria.

Parameters:

Name Type Description Default
runs_filter_model PipelineRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineRunResponseModel]

A list of all pipeline runs matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_runs(
    self, runs_filter_model: PipelineRunFilterModel
) -> Page[PipelineRunResponseModel]:
    """Fetch all pipeline runs that satisfy the given filter.

    Args:
        runs_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all pipeline runs matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=runs_filter_model,
        response_model=PipelineRunResponseModel,
        route=RUNS,
    )
    return page
list_schedules(self, schedule_filter_model)

List all schedules in the workspace.

Parameters:

Name Type Description Default
schedule_filter_model ScheduleFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ScheduleResponseModel]

A list of schedules.

Source code in zenml/zen_stores/rest_zen_store.py
def list_schedules(
    self, schedule_filter_model: ScheduleFilterModel
) -> Page[ScheduleResponseModel]:
    """Fetch all schedules in the workspace.

    Args:
        schedule_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of schedules.
    """
    page = self._list_paginated_resources(
        filter_model=schedule_filter_model,
        response_model=ScheduleResponseModel,
        route=SCHEDULES,
    )
    return page
list_service_connector_resources(self, user_name_or_id, workspace_name_or_id, connector_type=None, resource_type=None, resource_id=None)

List resources that can be accessed by service connectors.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to scope to.

required
workspace_name_or_id Union[str, uuid.UUID]

The name or ID of the workspace to scope to.

required
connector_type Optional[str]

The type of service connector to scope to.

None
resource_type Optional[str]

The type of resource to scope to.

None
resource_id Optional[str]

The ID of the resource to scope to.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorResourcesModel]

The matching list of resources that available service connectors have access to.

Source code in zenml/zen_stores/rest_zen_store.py
def list_service_connector_resources(
    self,
    user_name_or_id: Union[str, UUID],
    workspace_name_or_id: Union[str, UUID],
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> List[ServiceConnectorResourcesModel]:
    """List resources that can be accessed by service connectors.

    Queries the server for the resource lists of all matching service
    connectors, then re-resolves locally any connector whose type is only
    available on the client side.

    Args:
        user_name_or_id: The name or ID of the user to scope to.
        workspace_name_or_id: The name or ID of the workspace to scope to.
        connector_type: The type of service connector to scope to.
        resource_type: The type of resource to scope to.
        resource_id: The ID of the resource to scope to.

    Returns:
        The matching list of resources that available service
        connectors have access to.
    """
    # Only forward filters that were actually provided.
    # NOTE(review): `user_name_or_id` is not used in this body — the
    # request is scoped by workspace only; confirm whether user scoping
    # is applied server-side.
    params = {}
    if connector_type:
        params["connector_type"] = connector_type
    if resource_type:
        params["resource_type"] = resource_type
    if resource_id:
        params["resource_id"] = resource_id
    response_body = self.get(
        f"{WORKSPACES}/{workspace_name_or_id}{SERVICE_CONNECTORS}{SERVICE_CONNECTOR_RESOURCES}",
        params=params,
    )

    assert isinstance(response_body, list)
    resource_list = [
        ServiceConnectorResourcesModel.parse_obj(item)
        for item in response_body
    ]

    # Resolve connector type strings into full type models where possible.
    self._populate_connector_type(*resource_list)

    # For service connectors with types that are only locally available,
    # we need to retrieve the resource list locally
    for idx, resources in enumerate(resource_list):
        if isinstance(resources.connector_type, str):
            # Skip connector types that are neither locally nor remotely
            # available
            continue
        if resources.connector_type.remote:
            # Skip connector types that are remotely available
            continue

        # Retrieve the resource list locally
        assert resources.id is not None
        connector = self.get_service_connector(resources.id)
        connector_instance = (
            service_connector_registry.instantiate_connector(
                model=connector
            )
        )

        try:
            local_resources = connector_instance.verify(
                resource_type=resource_type,
                resource_id=resource_id,
            )
        except (ValueError, AuthorizationException) as e:
            # Best-effort: a connector that fails local verification is
            # left out of the local re-resolution rather than aborting
            # the whole listing.
            logger.error(
                f'Failed to fetch {resource_type or "available"} '
                f"resources from service connector {connector.name}/"
                f"{connector.id}: {e}"
            )
            continue

        # Replace the server-side entry with the locally verified one.
        resource_list[idx] = local_resources

    return resource_list
list_service_connector_types(self, connector_type=None, resource_type=None, auth_method=None)

Get a list of service connector types.

Parameters:

Name Type Description Default
connector_type Optional[str]

Filter by connector type.

None
resource_type Optional[str]

Filter by resource type.

None
auth_method Optional[str]

Filter by authentication method.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorTypeModel]

List of service connector types.

Source code in zenml/zen_stores/rest_zen_store.py
def list_service_connector_types(
    self,
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    auth_method: Optional[str] = None,
) -> List[ServiceConnectorTypeModel]:
    """Get a list of service connector types.

    Merges the connector types reported by the server with those found
    in the local client-side registry; local entries take precedence on
    conflict.

    Args:
        connector_type: Filter by connector type.
        resource_type: Filter by resource type.
        auth_method: Filter by authentication method.

    Returns:
        List of service connector types.
    """
    # Only forward filters that were actually provided.
    params = {}
    if connector_type:
        params["connector_type"] = connector_type
    if resource_type:
        params["resource_type"] = resource_type
    if auth_method:
        params["auth_method"] = auth_method
    response_body = self.get(
        SERVICE_CONNECTOR_TYPES,
        params=params,
    )

    assert isinstance(response_body, list)
    remote_connector_types = [
        ServiceConnectorTypeModel.parse_obj(item) for item in response_body
    ]

    # Mark the remote connector types as being only remotely available
    for c in remote_connector_types:
        c.local = False
        c.remote = True

    local_connector_types = (
        service_connector_registry.list_service_connector_types(
            connector_type=connector_type,
            resource_type=resource_type,
            auth_method=auth_method,
        )
    )

    # Add the connector types in the local registry to the list of
    # connector types available remotely. Overwrite those that have
    # the same connector type but mark them as being remotely available.
    connector_types_map = {
        connector_type.connector_type: connector_type
        for connector_type in remote_connector_types
    }

    # NOTE(review): setting `connector.remote = True` mutates objects
    # presumably owned by the shared local registry — confirm the
    # registry returns copies or that this mutation is intended.
    for connector in local_connector_types:
        if connector.connector_type in connector_types_map:
            connector.remote = True
        connector_types_map[connector.connector_type] = connector

    return list(connector_types_map.values())
list_service_connectors(self, filter_model)

List all service connectors.

Parameters:

Name Type Description Default
filter_model ServiceConnectorFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ServiceConnectorResponseModel]

A page of all service connectors.

Source code in zenml/zen_stores/rest_zen_store.py
def list_service_connectors(
    self, filter_model: ServiceConnectorFilterModel
) -> Page[ServiceConnectorResponseModel]:
    """Fetch a page of service connectors.

    Args:
        filter_model: Filter criteria and pagination parameters.

    Returns:
        A page of all service connectors.
    """
    page = self._list_paginated_resources(
        filter_model=filter_model,
        response_model=ServiceConnectorResponseModel,
        route=SERVICE_CONNECTORS,
        params={"expand_secrets": False},
    )
    # Resolve connector type references on the returned items.
    self._populate_connector_type(*page.items)
    return page
list_stack_components(self, component_filter_model)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
component_filter_model ComponentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ComponentResponseModel]

A list of all stack components matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_stack_components(
    self, component_filter_model: ComponentFilterModel
) -> Page[ComponentResponseModel]:
    """Fetch all stack components that satisfy the given filter.

    Args:
        component_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all stack components matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=component_filter_model,
        response_model=ComponentResponseModel,
        route=STACK_COMPONENTS,
    )
    return page
list_stacks(self, stack_filter_model)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
stack_filter_model StackFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[StackResponseModel]

A list of all stacks matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_stacks(
    self, stack_filter_model: StackFilterModel
) -> Page[StackResponseModel]:
    """Fetch all stacks that satisfy the given filter.

    Args:
        stack_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all stacks matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=stack_filter_model,
        response_model=StackResponseModel,
        route=STACKS,
    )
    return page
list_team_role_assignments(self, team_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
team_role_assignment_filter_model TeamRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_team_role_assignments(
    self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
) -> Page[TeamRoleAssignmentResponseModel]:
    """Fetch all team role assignments that satisfy the given filter.

    Args:
        team_role_assignment_filter_model: Filter criteria and pagination
            parameters.

    Returns:
        A list of all role assignments matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=team_role_assignment_filter_model,
        response_model=TeamRoleAssignmentResponseModel,
        route=TEAM_ROLE_ASSIGNMENTS,
    )
    return page
list_teams(self, team_filter_model)

List all teams matching the given filter criteria.

Parameters:

Name Type Description Default
team_filter_model TeamFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamResponseModel]

A list of all teams matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_teams(
    self, team_filter_model: TeamFilterModel
) -> Page[TeamResponseModel]:
    """Fetch all teams that satisfy the given filter.

    Args:
        team_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all teams matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=team_filter_model,
        response_model=TeamResponseModel,
        route=TEAMS,
    )
    return page
list_user_role_assignments(self, user_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
user_role_assignment_filter_model UserRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_user_role_assignments(
    self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
) -> Page[UserRoleAssignmentResponseModel]:
    """Fetch all user role assignments that satisfy the given filter.

    Args:
        user_role_assignment_filter_model: Filter criteria and pagination
            parameters.

    Returns:
        A list of all role assignments matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=user_role_assignment_filter_model,
        response_model=UserRoleAssignmentResponseModel,
        route=USER_ROLE_ASSIGNMENTS,
    )
    return page
list_users(self, user_filter_model)

List all users.

Parameters:

Name Type Description Default
user_filter_model UserFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/rest_zen_store.py
def list_users(
    self, user_filter_model: UserFilterModel
) -> Page[UserResponseModel]:
    """Fetch all users that satisfy the given filter.

    Args:
        user_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all users.
    """
    page = self._list_paginated_resources(
        filter_model=user_filter_model,
        response_model=UserResponseModel,
        route=USERS,
    )
    return page
list_workspaces(self, workspace_filter_model)

List all workspaces matching the given filter criteria.

Parameters:

Name Type Description Default
workspace_filter_model WorkspaceFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[WorkspaceResponseModel]

A list of all workspaces matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_workspaces(
    self, workspace_filter_model: WorkspaceFilterModel
) -> Page[WorkspaceResponseModel]:
    """Fetch all workspaces that satisfy the given filter.

    Args:
        workspace_filter_model: Filter criteria and pagination parameters.

    Returns:
        A list of all workspaces matching the filter criteria.
    """
    page = self._list_paginated_resources(
        filter_model=workspace_filter_model,
        response_model=WorkspaceResponseModel,
        route=WORKSPACES,
    )
    return page
post(self, path, body, params=None, **kwargs)

Make a POST request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
body BaseModel

The body to send.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def post(
    self,
    path: str,
    body: BaseModel,
    params: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Json:
    """Issue a POST request against the given endpoint path.

    Args:
        path: The path to the endpoint.
        body: The request body model to serialize and send.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments forwarded to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending POST request to {path}...")
    endpoint = self.url + API + VERSION_1 + path
    return self._request(
        "POST",
        endpoint,
        data=body.json(),
        params=params,
        **kwargs,
    )
put(self, path, body=None, params=None, **kwargs)

Make a PUT request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
body Optional[pydantic.main.BaseModel]

The body to send.

None
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def put(
    self,
    path: str,
    body: Optional[BaseModel] = None,
    params: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Json:
    """Issue a PUT request against the given endpoint path.

    Args:
        path: The path to the endpoint.
        body: The request body model to serialize and send, if any.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments forwarded to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending PUT request to {path}...")
    # Serialize only explicitly set fields; PUT with no body sends None.
    payload = body.json(exclude_unset=True) if body else None
    endpoint = self.url + API + VERSION_1 + path
    return self._request(
        "PUT",
        endpoint,
        data=payload,
        params=params,
        **kwargs,
    )
update_code_repository(self, code_repository_id, update)

Updates an existing code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to update.

required
update CodeRepositoryUpdateModel

The update to be applied to the code repository.

required

Returns:

Type Description
CodeRepositoryResponseModel

The updated code repository.

Source code in zenml/zen_stores/rest_zen_store.py
def update_code_repository(
    self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
) -> CodeRepositoryResponseModel:
    """Apply an update to an existing code repository.

    Args:
        code_repository_id: The ID of the code repository to update.
        update: The update to be applied to the code repository.

    Returns:
        The updated code repository.
    """
    updated = self._update_resource(
        route=CODE_REPOSITORIES,
        resource_id=code_repository_id,
        resource_update=update,
        response_model=CodeRepositoryResponseModel,
    )
    return updated
update_flavor(self, flavor_id, flavor_update)

Updates an existing flavor.

Parameters:

Name Type Description Default
flavor_id UUID

The id of the flavor to update.

required
flavor_update FlavorUpdateModel

The update to be applied to the flavor.

required

Returns:

Type Description
FlavorResponseModel

The updated flavor.

Source code in zenml/zen_stores/rest_zen_store.py
def update_flavor(
    self, flavor_id: UUID, flavor_update: FlavorUpdateModel
) -> FlavorResponseModel:
    """Updates an existing flavor.

    Args:
        flavor_id: The id of the flavor to update.
        flavor_update: The update to be applied to the flavor.

    Returns:
        The updated flavor.
    """
    return self._update_resource(
        resource_id=flavor_id,
        resource_update=flavor_update,
        route=FLAVORS,
        response_model=FlavorResponseModel,
    )
update_pipeline(*args, **kwargs)

Updates a pipeline.

Parameters:

Name Type Description Default
pipeline_id

The ID of the pipeline to be updated.

required
pipeline_update

The update to be applied.

required

Returns:

Type Description
Any

The updated pipeline.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated function call in an analytics event handler,
    attaching tracker and metadata information when available.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event`, `v1`, `v2` and `func` are closure variables
    # captured from the enclosing decorator — confirm against the
    # decorator definition.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the first positional argument is an analytics tracker,
            # use it as the tracker for this event.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the
            # source of the event metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_role(*args, **kwargs)

Update an existing role.

Parameters:

Name Type Description Default
role_id

The ID of the role to be updated.

required
role_update

The update to be applied to the role.

required

Returns:

Type Description
Any

The updated role.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated function call in an analytics event handler,
    attaching tracker and metadata information when available.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event`, `v1`, `v2` and `func` are closure variables
    # captured from the enclosing decorator — confirm against the
    # decorator definition.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the first positional argument is an analytics tracker,
            # use it as the tracker for this event.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the
            # source of the event metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.
    """
    return self._update_resource(
        resource_id=run_id,
        resource_update=run_update,
        response_model=PipelineRunResponseModel,
        route=RUNS,
    )
update_run_step(self, step_run_id, step_run_update)

Updates a step run.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step to update.

required
step_run_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step run.

Source code in zenml/zen_stores/rest_zen_store.py
def update_run_step(
    self,
    step_run_id: UUID,
    step_run_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Apply an update to an existing step run.

    Args:
        step_run_id: The ID of the step to update.
        step_run_update: The update to be applied to the step.

    Returns:
        The updated step run.
    """
    updated = self._update_resource(
        route=STEPS,
        resource_id=step_run_id,
        resource_update=step_run_update,
        response_model=StepRunResponseModel,
    )
    return updated
update_schedule(self, schedule_id, schedule_update)

Updates a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to be updated.

required
schedule_update ScheduleUpdateModel

The update to be applied.

required

Returns:

Type Description
ScheduleResponseModel

The updated schedule.

Source code in zenml/zen_stores/rest_zen_store.py
def update_schedule(
    self,
    schedule_id: UUID,
    schedule_update: ScheduleUpdateModel,
) -> ScheduleResponseModel:
    """Apply an update to an existing schedule.

    Args:
        schedule_id: The ID of the schedule to be updated.
        schedule_update: The update to be applied.

    Returns:
        The updated schedule.
    """
    updated = self._update_resource(
        route=SCHEDULES,
        resource_id=schedule_id,
        resource_update=schedule_update,
        response_model=ScheduleResponseModel,
    )
    return updated
update_service_connector(self, service_connector_id, update)

Updates an existing service connector.

The update model contains the fields to be updated. If a field value is set to None in the model, the field is not updated, but there are special rules concerning some fields:

  • the configuration and secrets fields together represent a full valid configuration update, not just a partial update. If either is set (i.e. not None) in the update, their values are merged together and will replace the existing configuration and secrets values.
  • the resource_id field value is also a full replacement value: if set to None, the resource ID is removed from the service connector.
  • the expiration_seconds field value is also a full replacement value: if set to None, the expiration is removed from the service connector.
  • the secret_id field value in the update is ignored, given that secrets are managed internally by the ZenML store.
  • the labels field is also a full labels update: if set (i.e. not None), all existing labels are removed and replaced by the new labels in the update.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to update.

required
update ServiceConnectorUpdateModel

The update to be applied to the service connector.

required

Returns:

Type Description
ServiceConnectorResponseModel

The updated service connector.

Source code in zenml/zen_stores/rest_zen_store.py
def update_service_connector(
    self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
) -> ServiceConnectorResponseModel:
    """Updates an existing service connector.

    The update model contains the fields to be updated. A field value of
    None means the field keeps its current value, with the following
    exceptions:

    * the `configuration` and `secrets` fields together form a complete
    valid configuration update rather than a partial one. If either of
    them is set (i.e. not None), their merged values replace the
    existing configuration and secrets values.
    * the `resource_id` field is a full replacement value: setting it to
    `None` removes the resource ID from the service connector.
    * the `expiration_seconds` field is a full replacement value: setting
    it to `None` removes the expiration from the service connector.
    * the `secret_id` field in the update is ignored, given that secrets
    are managed internally by the ZenML store.
    * the `labels` field is a full labels update: if set (i.e. not
    `None`), all existing labels are removed and replaced by the new
    labels in the update.

    Args:
        service_connector_id: The ID of the service connector to update.
        update: The update to be applied to the service connector.

    Returns:
        The updated service connector.
    """
    updated_connector = self._update_resource(
        route=SERVICE_CONNECTORS,
        resource_id=service_connector_id,
        resource_update=update,
        response_model=ServiceConnectorResponseModel,
    )
    # Resolve the connector type reference on the returned model.
    self._populate_connector_type(updated_connector)
    return updated_connector
update_stack(*args, **kwargs)

Update a stack.

Parameters:

Name Type Description Default
stack_id

The ID of the stack to update.

required
stack_update

The update request on the stack.

required

Returns:

Type Description
Any

The updated stack.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated function call in an analytics event handler,
    attaching tracker and metadata information when available.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event`, `v1`, `v2` and `func` are closure variables
    # captured from the enclosing decorator — confirm against the
    # decorator definition.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the first positional argument is an analytics tracker,
            # use it as the tracker for this event.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the
            # source of the event metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_stack_component(*args, **kwargs)

Update an existing stack component.

Parameters:

Name Type Description Default
component_id

The ID of the stack component to update.

required
component_update

The update to be applied to the stack component.

required

Returns:

Type Description
Any

The updated stack component.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated function call in an analytics event handler,
    attaching tracker and metadata information when available.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event`, `v1`, `v2` and `func` are closure variables
    # captured from the enclosing decorator — confirm against the
    # decorator definition.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the first positional argument is an analytics tracker,
            # use it as the tracker for this event.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the
            # source of the event metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_team(*args, **kwargs)

Update an existing team.

Parameters:

Name Type Description Default
team_id

The ID of the team to be updated.

required
team_update

The update to be applied to the team.

required

Returns:

Type Description
Any

The updated team.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapped function that reports analytics around the call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics enrichment before the call; tracking
        # problems must never break the wrapped function.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            tracked = next(
                (
                    obj
                    for obj in (*args, *kwargs.values())
                    if isinstance(obj, AnalyticsTrackedModelMixin)
                ),
                None,
            )
            if tracked is not None:
                handler.metadata = tracked.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata taken from the result model when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_user(*args, **kwargs)

Updates an existing user.

Parameters:

Name Type Description Default
user_id

The id of the user to update.

required
user_update

The update to be applied to the user.

required

Returns:

Type Description
Any

The updated user.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapped function that reports analytics around the call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics enrichment before the call; tracking
        # problems must never break the wrapped function.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            tracked = next(
                (
                    obj
                    for obj in (*args, *kwargs.values())
                    if isinstance(obj, AnalyticsTrackedModelMixin)
                ),
                None,
            )
            if tracked is not None:
                handler.metadata = tracked.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata taken from the result model when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_workspace(*args, **kwargs)

Update an existing workspace.

Parameters:

Name Type Description Default
workspace_id

The ID of the workspace to be updated.

required
workspace_update

The update to be applied to the workspace.

required

Returns:

Type Description
Any

The updated workspace.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapped function that reports analytics around the call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics enrichment before the call; tracking
        # problems must never break the wrapped function.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            tracked = next(
                (
                    obj
                    for obj in (*args, *kwargs.values())
                    if isinstance(obj, AnalyticsTrackedModelMixin)
                ),
                None,
            )
            if tracked is not None:
                handler.metadata = tracked.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata taken from the result model when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
verify_service_connector(self, service_connector_id, resource_type=None, resource_id=None, list_resources=True)

Verifies if a service connector instance has access to one or more resources.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to verify.

required
resource_type Optional[str]

The type of resource to verify access to.

None
resource_id Optional[str]

The ID of the resource to verify access to.

None
list_resources bool

If True, the list of all resources accessible through the service connector and matching the supplied resource type and ID are returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector has access to, scoped to the supplied resource type and ID, if provided.

Source code in zenml/zen_stores/rest_zen_store.py
def verify_service_connector(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector instance has access to one or more resources.

    Args:
        service_connector_id: The ID of the service connector to verify.
        resource_type: The type of resource to verify access to.
        resource_id: The ID of the resource to verify access to.
        list_resources: If True, the list of all resources accessible
            through the service connector and matching the supplied resource
            type and ID are returned.

    Returns:
        The list of resources that the service connector has access to,
        scoped to the supplied resource type and ID, if provided.
    """
    params: Dict[str, Any] = {"list_resources": list_resources}
    # Only forward the optional filters when they are actually set.
    for key, value in (
        ("resource_type", resource_type),
        ("resource_id", resource_id),
    ):
        if value:
            params[key] = value

    verify_url = (
        f"{SERVICE_CONNECTORS}/{service_connector_id}"
        f"{SERVICE_CONNECTOR_VERIFY}"
    )
    response_body = self.put(verify_url, params=params)

    resources = ServiceConnectorResourcesModel.parse_obj(response_body)
    # The server response only carries the connector type name; resolve it
    # to the full type specification client-side.
    self._populate_connector_type(resources)
    return resources
verify_service_connector_config(self, service_connector, list_resources=True)

Verifies if a service connector configuration has access to resources.

Parameters:

Name Type Description Default
service_connector ServiceConnectorRequestModel

The service connector configuration to verify.

required
list_resources bool

If True, the list of all resources accessible through the service connector and matching the supplied resource type and ID are returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector configuration has access to.

Source code in zenml/zen_stores/rest_zen_store.py
def verify_service_connector_config(
    self,
    service_connector: ServiceConnectorRequestModel,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector configuration has access to resources.

    Args:
        service_connector: The service connector configuration to verify.
        list_resources: If True, the list of all resources accessible
            through the service connector and matching the supplied resource
            type and ID are returned.

    Returns:
        The list of resources that the service connector configuration has
        access to.
    """
    verify_url = f"{SERVICE_CONNECTORS}{SERVICE_CONNECTOR_VERIFY}"
    response_body = self.post(
        verify_url,
        body=service_connector,
        params={"list_resources": list_resources},
    )

    resources = ServiceConnectorResourcesModel.parse_obj(response_body)
    # Resolve the connector type name in the response to its full
    # client-side type specification.
    self._populate_connector_type(resources)
    return resources

RestZenStoreConfiguration (StoreConfiguration) pydantic-model

REST ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The configuration of the secrets store to use. This defaults to a REST secrets store that extends the REST ZenML store.

username Optional[str]

The username to use to connect to the Zen server.

password Optional[str]

The password to use to connect to the Zen server.

verify_ssl Union[bool, str]

Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use or the CA bundle value itself.

http_timeout int

The timeout to use for all requests.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStoreConfiguration(StoreConfiguration):
    """REST ZenML store configuration.

    Attributes:
        type: The type of the store.
        secrets_store: The configuration of the secrets store to use.
            This defaults to a REST secrets store that extends the REST ZenML
            store.
        username: The username to use to connect to the Zen server.
        password: The password to use to connect to the Zen server.
        api_token: The API token to use to connect to the Zen server, as an
            alternative to username/password credentials.
        verify_ssl: Either a boolean, in which case it controls whether we
            verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use or the CA bundle value itself.
        http_timeout: The timeout to use for all requests.
    """

    type: StoreType = StoreType.REST

    secrets_store: Optional[SecretsStoreConfiguration] = None

    username: Optional[str] = None
    password: Optional[str] = None
    api_token: Optional[str] = None
    verify_ssl: Union[bool, str] = True
    http_timeout: int = DEFAULT_HTTP_TIMEOUT

    @validator("secrets_store")
    def validate_secrets_store(
        cls, secrets_store: Optional[SecretsStoreConfiguration]
    ) -> SecretsStoreConfiguration:
        """Ensures that the secrets store uses an associated REST secrets store.

        Args:
            secrets_store: The secrets store config to be validated.

        Returns:
            The validated secrets store config.

        Raises:
            ValueError: If the secrets store is not of type REST.
        """
        if secrets_store is None:
            secrets_store = RestSecretsStoreConfiguration()
        elif secrets_store.type != SecretsStoreType.REST:
            raise ValueError(
                "The secrets store associated with a REST zen store must be "
                f"of type REST, but is of type {secrets_store.type}."
            )

        return secrets_store

    @root_validator
    def validate_credentials(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validates the credentials provided in the values dictionary.

        Args:
            values: A dictionary containing the values to be validated.

        Raises:
            ValueError: If neither api_token nor username is set.

        Returns:
            The values dictionary.
        """
        # Check if the values dictionary contains either an api_token or a
        # username as non-empty strings.
        if values.get("api_token") or values.get("username"):
            return values
        else:
            raise ValueError(
                "Neither api_token nor username is set in the store config."
            )

    @validator("url")
    def validate_url(cls, url: str) -> str:
        """Validates that the URL is a well-formed REST store URL.

        Args:
            url: The URL to be validated.

        Returns:
            The validated URL without trailing slashes.

        Raises:
            ValueError: If the URL is not a well-formed REST store URL.
        """
        url = url.rstrip("/")
        scheme = re.search("^([a-z0-9]+://)", url)
        if scheme is None or scheme.group() not in ("https://", "http://"):
            # Fix: the message previously was not an f-string, so the literal
            # text "{url}" was shown instead of the offending URL.
            raise ValueError(
                f"Invalid URL for REST store: {url}. Should be in the form "
                "https://hostname[:port] or http://hostname[:port]."
            )

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        return url

    @validator("verify_ssl")
    def validate_verify_ssl(
        cls, verify_ssl: Union[bool, str]
    ) -> Union[bool, str]:
        """Validates that the verify_ssl either points to a file or is a bool.

        Args:
            verify_ssl: The verify_ssl value to be validated.

        Returns:
            The validated verify_ssl value.
        """
        secret_folder = Path(
            GlobalConfiguration().local_stores_path,
            "certificates",
        )
        # Booleans and paths already under the managed certificates folder
        # pass through unchanged.
        if isinstance(verify_ssl, bool) or verify_ssl.startswith(
            str(secret_folder)
        ):
            return verify_ssl

        # A path to an external CA bundle: load its contents.
        if os.path.isfile(verify_ssl):
            with open(verify_ssl, "r") as f:
                verify_ssl = f.read()

        # Persist the CA bundle contents to a private, well-known location
        # and return that path instead of the raw contents.
        fileio.makedirs(str(secret_folder))
        file_path = Path(secret_folder, "ca_bundle.pem")
        with open(file_path, "w") as f:
            f.write(verify_ssl)
        file_path.chmod(0o600)
        verify_ssl = str(file_path)

        return verify_ssl

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return urlparse(url).scheme in ("http", "https")

    def expand_certificates(self) -> None:
        """Expands the certificates in the verify_ssl field."""
        # Load the certificate values back into the configuration
        if isinstance(self.verify_ssl, str) and os.path.isfile(
            self.verify_ssl
        ):
            with open(self.verify_ssl, "r") as f:
                self.verify_ssl = f.read()

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Create a copy of the store config using a different path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, RestZenStoreConfiguration)
        assert config.api_token is not None
        config = config.copy(exclude={"username", "password"}, deep=True)
        # Load the certificate values back into the configuration
        config.expand_certificates()
        return config

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the `verify_ssl` attribute can be expanded to the contents
        # of the certificate file.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/rest_zen_store.py
class Config:
    """Pydantic model configuration."""

    # Assignment validation stays off: `verify_ssl` may later be overwritten
    # with the expanded contents of a certificate file, which would not
    # survive re-validation.
    validate_assignment = False
    # Reject any attribute that is not declared on the model.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Create a copy of the store config using a different path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Create a copy of the store config using a different path.

    Used to produce a store configuration copy that can be loaded from a
    different configuration path or in a new environment (e.g. inside a
    container image). Any configuration files that accompany the store
    configuration (certificates etc.) are copied along.

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the copied
            configuration. May differ from `config_path` when the copy is
            loaded from a different environment, e.g. when the configuration
            is copied into a container image and loaded using a different
            absolute path. This is reflected in the paths and URLs encoded
            in the copied configuration.

    Returns:
        A new store configuration object that reflects the new configuration
        path.
    """
    # A REST store copy must carry an API token; username/password are
    # deliberately excluded from the copy.
    assert isinstance(config, RestZenStoreConfiguration)
    assert config.api_token is not None
    copied = config.copy(exclude={"username", "password"}, deep=True)
    # Inline certificate file contents so the copy is self-contained.
    # NOTE(review): `config_path`/`load_config_path` are not used here —
    # presumably handled by the base class; confirm.
    copied.expand_certificates()
    return copied
expand_certificates(self)

Expands the certificates in the verify_ssl field.

Source code in zenml/zen_stores/rest_zen_store.py
def expand_certificates(self) -> None:
    """Replace a certificate file path in `verify_ssl` with its contents."""
    path = self.verify_ssl
    # Booleans and non-file strings are left untouched; only an existing
    # file path is inlined so the configuration becomes self-contained.
    if isinstance(path, str) and os.path.isfile(path):
        with open(path, "r") as f:
            self.verify_ssl = f.read()
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    # Only plain HTTP(S) endpoints can be served by a REST zen store.
    scheme = urlparse(url).scheme
    return scheme in {"http", "https"}
validate_credentials(values) classmethod

Validates the credentials provided in the values dictionary.

Parameters:

Name Type Description Default
values Dict[str, Any]

A dictionary containing the values to be validated.

required

Exceptions:

Type Description
ValueError

If neither api_token nor username is set.

Returns:

Type Description
Dict[str, Any]

The values dictionary.

Source code in zenml/zen_stores/rest_zen_store.py
@root_validator
def validate_credentials(cls, values: Dict[str, Any]) -> Dict[str, Any]:
    """Validates the credentials provided in the values dictionary.

    Args:
        values: A dictionary containing the values to be validated.

    Raises:
        ValueError: If neither api_token nor username is set.

    Returns:
        The values dictionary.
    """
    # At least one non-empty credential (api_token or username) is required.
    if not (values.get("api_token") or values.get("username")):
        raise ValueError(
            "Neither api_token nor username is set in the store config."
        )
    return values
validate_secrets_store(secrets_store) classmethod

Ensures that the secrets store uses an associated REST secrets store.

Parameters:

Name Type Description Default
secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The secrets store config to be validated.

required

Returns:

Type Description
SecretsStoreConfiguration

The validated secrets store config.

Exceptions:

Type Description
ValueError

If the secrets store is not of type REST.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("secrets_store")
def validate_secrets_store(
    cls, secrets_store: Optional[SecretsStoreConfiguration]
) -> SecretsStoreConfiguration:
    """Ensure the configured secrets store is a REST secrets store.

    Args:
        secrets_store: The secrets store config to be validated.

    Returns:
        The validated secrets store config.

    Raises:
        ValueError: If the secrets store is not of type REST.
    """
    if secrets_store is None:
        # Default to a REST secrets store that extends this zen store.
        return RestSecretsStoreConfiguration()

    if secrets_store.type != SecretsStoreType.REST:
        raise ValueError(
            "The secrets store associated with a REST zen store must be "
            f"of type REST, but is of type {secrets_store.type}."
        )

    return secrets_store
validate_url(url) classmethod

Validates that the URL is a well-formed REST store URL.

Parameters:

Name Type Description Default
url str

The URL to be validated.

required

Returns:

Type Description
str

The validated URL without trailing slashes.

Exceptions:

Type Description
ValueError

If the URL is not a well-formed REST store URL.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("url")
def validate_url(cls, url: str) -> str:
    """Validates that the URL is a well-formed REST store URL.

    Args:
        url: The URL to be validated.

    Returns:
        The validated URL without trailing slashes.

    Raises:
        ValueError: If the URL is not a well-formed REST store URL.
    """
    url = url.rstrip("/")
    scheme = re.search("^([a-z0-9]+://)", url)
    if scheme is None or scheme.group() not in ("https://", "http://"):
        # Fix: the message previously was not an f-string, so the literal
        # text "{url}" was shown instead of the offending URL.
        raise ValueError(
            f"Invalid URL for REST store: {url}. Should be in the form "
            "https://hostname[:port] or http://hostname[:port]."
        )

    # When running inside a container, if the URL uses localhost, the
    # target service will not be available. We try to replace localhost
    # with one of the special Docker or K3D internal hostnames.
    url = replace_localhost_with_internal_hostname(url)

    return url
validate_verify_ssl(verify_ssl) classmethod

Validates that verify_ssl is either a boolean or points to a certificate file.

Parameters:

Name Type Description Default
verify_ssl Union[bool, str]

The verify_ssl value to be validated.

required

Returns:

Type Description
Union[bool, str]

The validated verify_ssl value.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("verify_ssl")
def validate_verify_ssl(
    cls, verify_ssl: Union[bool, str]
) -> Union[bool, str]:
    """Validates that the verify_ssl either points to a file or is a bool.

    Args:
        verify_ssl: The verify_ssl value to be validated.

    Returns:
        The validated verify_ssl value.
    """
    certificates_dir = Path(
        GlobalConfiguration().local_stores_path,
        "certificates",
    )
    # Booleans and paths already inside the managed certificates folder
    # pass through unchanged.
    if isinstance(verify_ssl, bool):
        return verify_ssl
    if verify_ssl.startswith(str(certificates_dir)):
        return verify_ssl

    # A path to an external CA bundle: load its contents.
    if os.path.isfile(verify_ssl):
        with open(verify_ssl, "r") as f:
            verify_ssl = f.read()

    # Persist the CA bundle contents to a private, well-known location and
    # return that path instead of the raw contents.
    fileio.makedirs(str(certificates_dir))
    bundle_path = Path(certificates_dir, "ca_bundle.pem")
    with open(bundle_path, "w") as f:
        f.write(verify_ssl)
    bundle_path.chmod(0o600)
    return str(bundle_path)

schemas special

SQL Model Implementations.

artifact_schemas

SQLModel implementation of artifact tables.

ArtifactSchema (NamedSchema) pydantic-model

SQL Model for artifacts.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
class ArtifactSchema(NamedSchema, table=True):
    """SQL Model for artifacts."""

    __tablename__ = "artifact"

    # Artifact store component that holds the artifact's payload; the link
    # is severed (SET NULL) when that stack component is deleted.
    artifact_store_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackComponentSchema.__tablename__,
        source_column="artifact_store_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )

    # Owning user; nullable with SET NULL so artifacts survive user deletion.
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="artifacts")

    # Workspace is mandatory; deleting the workspace cascades to its
    # artifacts.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="artifacts")

    # Core artifact fields. `materializer` and `data_type` hold
    # JSON-serialized `Source` objects (or, for old rows, plain import
    # paths — see `to_model`).
    type: ArtifactType
    uri: str = Field(sa_column=Column(TEXT, nullable=False))
    materializer: str = Field(sa_column=Column(TEXT, nullable=False))
    data_type: str = Field(sa_column=Column(TEXT, nullable=False))

    # Dependent rows are deleted along with the artifact ("cascade: delete").
    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="artifact",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    input_to_step_runs: List["StepRunInputArtifactSchema"] = Relationship(
        back_populates="artifact",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    output_of_step_runs: List["StepRunOutputArtifactSchema"] = Relationship(
        back_populates="artifact",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    visualizations: List["ArtifactVisualizationSchema"] = Relationship(
        back_populates="artifact",
        sa_relationship_kwargs={"cascade": "delete"},
    )

    @classmethod
    def from_request(
        cls, artifact_request: ArtifactRequestModel
    ) -> "ArtifactSchema":
        """Convert an `ArtifactRequestModel` to an `ArtifactSchema`.

        Args:
            artifact_request: The request model to convert.

        Returns:
            The converted schema.
        """
        return cls(
            name=artifact_request.name,
            artifact_store_id=artifact_request.artifact_store_id,
            workspace_id=artifact_request.workspace,
            user_id=artifact_request.user,
            type=artifact_request.type,
            uri=artifact_request.uri,
            # Sources are stored as their JSON serialization.
            materializer=artifact_request.materializer.json(),
            data_type=artifact_request.data_type.json(),
        )

    def to_model(
        self, producer_step_run_id: Optional[UUID]
    ) -> ArtifactResponseModel:
        """Convert an `ArtifactSchema` to an `ArtifactModel`.

        Args:
            producer_step_run_id: The ID of the step run that produced this
                artifact.

        Returns:
            The created `ArtifactModel`.
        """
        metadata = {
            metadata_schema.key: metadata_schema.to_model()
            for metadata_schema in self.run_metadata
        }

        # Stored sources may predate the serialized `Source` format; fall
        # back to interpreting the raw value as an import path.
        try:
            materializer = Source.parse_raw(self.materializer)
        except ValidationError:
            # This is an old source which was simply an importable source path
            materializer = Source.from_import_path(self.materializer)

        try:
            data_type = Source.parse_raw(self.data_type)
        except ValidationError:
            # This is an old source which was simply an importable source path
            data_type = Source.from_import_path(self.data_type)

        return ArtifactResponseModel(
            id=self.id,
            name=self.name,
            artifact_store_id=self.artifact_store_id,
            user=self.user.to_model() if self.user else None,
            workspace=self.workspace.to_model(),
            type=self.type,
            uri=self.uri,
            materializer=materializer,
            data_type=data_type,
            created=self.created,
            updated=self.updated,
            producer_step_run_id=producer_step_run_id,
            metadata=metadata,
            visualizations=[vis.to_model() for vis in self.visualizations],
        )
from_request(artifact_request) classmethod

Convert an ArtifactRequestModel to an ArtifactSchema.

Parameters:

Name Type Description Default
artifact_request ArtifactRequestModel

The request model to convert.

required

Returns:

Type Description
ArtifactSchema

The converted schema.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
@classmethod
def from_request(
    cls, artifact_request: ArtifactRequestModel
) -> "ArtifactSchema":
    """Build an `ArtifactSchema` from an `ArtifactRequestModel`.

    Args:
        artifact_request: The request model to convert.

    Returns:
        The converted schema.
    """
    # Sources are persisted as their JSON serialization.
    fields = dict(
        name=artifact_request.name,
        artifact_store_id=artifact_request.artifact_store_id,
        workspace_id=artifact_request.workspace,
        user_id=artifact_request.user,
        type=artifact_request.type,
        uri=artifact_request.uri,
        materializer=artifact_request.materializer.json(),
        data_type=artifact_request.data_type.json(),
    )
    return cls(**fields)
to_model(self, producer_step_run_id)

Convert an ArtifactSchema to an ArtifactModel.

Parameters:

Name Type Description Default
producer_step_run_id Optional[uuid.UUID]

The ID of the step run that produced this artifact.

required

Returns:

Type Description
ArtifactResponseModel

The created ArtifactModel.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
def to_model(
    self, producer_step_run_id: Optional[UUID]
) -> ArtifactResponseModel:
    """Create an `ArtifactResponseModel` from this schema.

    Args:
        producer_step_run_id: The ID of the step run that produced this
            artifact.

    Returns:
        The created `ArtifactModel`.
    """

    def _load_source(raw: str) -> Source:
        # Legacy rows stored a plain import path instead of a serialized
        # `Source`, so fall back to import-path parsing on failure.
        try:
            return Source.parse_raw(raw)
        except ValidationError:
            return Source.from_import_path(raw)

    run_metadata = {
        metadata_schema.key: metadata_schema.to_model()
        for metadata_schema in self.run_metadata
    }

    return ArtifactResponseModel(
        id=self.id,
        name=self.name,
        artifact_store_id=self.artifact_store_id,
        user=self.user.to_model() if self.user else None,
        workspace=self.workspace.to_model(),
        type=self.type,
        uri=self.uri,
        materializer=_load_source(self.materializer),
        data_type=_load_source(self.data_type),
        created=self.created,
        updated=self.updated,
        producer_step_run_id=producer_step_run_id,
        metadata=run_metadata,
        visualizations=[v.to_model() for v in self.visualizations],
    )
ArtifactVisualizationSchema (BaseSchema) pydantic-model

SQL Model for visualizations of artifacts.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
class ArtifactVisualizationSchema(BaseSchema, table=True):
    """SQL Model for visualizations of artifacts."""

    __tablename__ = "artifact_visualization"

    # Kind of visualization, as a `VisualizationType` enum value.
    type: VisualizationType
    # Storage location of the visualization payload; required TEXT column.
    uri: str = Field(sa_column=Column(TEXT, nullable=False))

    # Owning artifact; deleting the artifact cascades to its visualizations.
    artifact_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ArtifactSchema.__tablename__,
        source_column="artifact_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    artifact: ArtifactSchema = Relationship(back_populates="visualizations")

    @classmethod
    def from_model(
        cls, visualization: VisualizationModel, artifact_id: UUID
    ) -> "ArtifactVisualizationSchema":
        """Convert a `VisualizationModel` to an `ArtifactVisualizationSchema`.

        Args:
            visualization: The visualization.
            artifact_id: The ID of the artifact this visualization belongs to.

        Returns:
            The `ArtifactVisualizationSchema`.
        """
        return cls(
            type=visualization.type,
            uri=visualization.uri,
            artifact_id=artifact_id,
        )

    def to_model(self) -> VisualizationModel:
        """Convert an `ArtifactVisualizationSchema` to a `Visualization`.

        Returns:
            The `Visualization`.
        """
        return VisualizationModel(type=self.type, uri=self.uri)
from_model(visualization, artifact_id) classmethod

Convert a Visualization to an ArtifactVisualizationSchema.

Parameters:

Name Type Description Default
visualization VisualizationModel

The visualization.

required
artifact_id UUID

The ID of the artifact this visualization belongs to.

required

Returns:

Type Description
ArtifactVisualizationSchema

The ArtifactVisualizationSchema.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
@classmethod
def from_model(
    cls, visualization: VisualizationModel, artifact_id: UUID
) -> "ArtifactVisualizationSchema":
    """Create an `ArtifactVisualizationSchema` from a `VisualizationModel`.

    Args:
        visualization: The visualization.
        artifact_id: The ID of the artifact this visualization belongs to.

    Returns:
        The `ArtifactVisualizationSchema`.
    """
    schema_kwargs = {
        "type": visualization.type,
        "uri": visualization.uri,
        "artifact_id": artifact_id,
    }
    return cls(**schema_kwargs)
to_model(self)

Convert an ArtifactVisualizationSchema to a Visualization.

Returns:

Type Description
VisualizationModel

The Visualization.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
def to_model(self) -> VisualizationModel:
    """Turn this `ArtifactVisualizationSchema` into a `Visualization`.

    Returns:
        The `Visualization`.
    """
    return VisualizationModel(
        type=self.type,
        uri=self.uri,
    )

base_schemas

Base classes for SQLModel schemas.

BaseSchema (SQLModel) pydantic-model

Base SQL Model for ZenML entities.

Source code in zenml/zen_stores/schemas/base_schemas.py
class BaseSchema(SQLModel):
    """Base SQL Model for ZenML entities."""

    # Client-generated surrogate primary key.
    id: UUID = Field(default_factory=uuid4, primary_key=True)
    # Creation / last-update timestamps; both default to the current UTC time.
    created: datetime = Field(default_factory=datetime.utcnow)
    updated: datetime = Field(default_factory=datetime.utcnow)
NamedSchema (BaseSchema) pydantic-model

Base Named SQL Model.

Source code in zenml/zen_stores/schemas/base_schemas.py
class NamedSchema(BaseSchema):
    """Base Named SQL Model."""

    # Human-readable name of the entity.
    name: str
ShareableSchema (NamedSchema) pydantic-model

Base shareable SQL Model.

Source code in zenml/zen_stores/schemas/base_schemas.py
class ShareableSchema(NamedSchema):
    """Base shareable SQL Model."""

    # Flag marking the entity as shared (sharing semantics are enforced by
    # consumers of this schema, not here).
    is_shared: bool

code_repository_schemas

SQL Model Implementations for code repositories.

CodeReferenceSchema (BaseSchema) pydantic-model

SQL Model for code references.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
class CodeReferenceSchema(BaseSchema, table=True):
    """SQL Model for code references."""

    __tablename__ = "code_reference"

    # Owning workspace; deleting the workspace cascades to its references.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship()

    # Referenced code repository; deleting the repository cascades here too.
    code_repository_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=CodeRepositorySchema.__tablename__,
        source_column="code_repository_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    code_repository: "CodeRepositorySchema" = Relationship()

    # Commit hash and subdirectory within the repository that this
    # reference points to.
    commit: str
    subdirectory: str

    @classmethod
    def from_request(
        cls, request: "CodeReferenceRequestModel", workspace_id: UUID
    ) -> "CodeReferenceSchema":
        """Convert a `CodeReferenceRequestModel` to a `CodeReferenceSchema`.

        Args:
            request: The request model to convert.
            workspace_id: The workspace ID.

        Returns:
            The converted schema.
        """
        return cls(
            workspace_id=workspace_id,
            commit=request.commit,
            subdirectory=request.subdirectory,
            code_repository_id=request.code_repository,
        )

    def to_model(
        self,
    ) -> "CodeReferenceResponseModel":
        """Convert a `CodeReferenceSchema` to a `CodeReferenceResponseModel`.

        Returns:
            The converted model.
        """
        return CodeReferenceResponseModel(
            id=self.id,
            created=self.created,
            updated=self.updated,
            commit=self.commit,
            subdirectory=self.subdirectory,
            code_repository=self.code_repository.to_model(),
        )
from_request(request, workspace_id) classmethod

Convert a CodeReferenceRequestModel to a CodeReferenceSchema.

Parameters:

Name Type Description Default
request CodeReferenceRequestModel

The request model to convert.

required
workspace_id UUID

The workspace ID.

required

Returns:

Type Description
CodeReferenceSchema

The converted schema.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
@classmethod
def from_request(
    cls, request: "CodeReferenceRequestModel", workspace_id: UUID
) -> "CodeReferenceSchema":
    """Build a `CodeReferenceSchema` from a `CodeReferenceRequestModel`.

    Args:
        request: The request model to convert.
        workspace_id: The workspace ID.

    Returns:
        The converted schema.
    """
    schema_kwargs = {
        "workspace_id": workspace_id,
        "commit": request.commit,
        "subdirectory": request.subdirectory,
        "code_repository_id": request.code_repository,
    }
    return cls(**schema_kwargs)
to_model(self)

Convert a CodeReferenceSchema to a CodeReferenceResponseModel.

Returns:

Type Description
CodeReferenceResponseModel

The converted model.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
def to_model(
    self,
) -> "CodeReferenceResponseModel":
    """Build a `CodeReferenceResponseModel` from this schema.

    Returns:
        The converted model.
    """
    repository_model = self.code_repository.to_model()
    return CodeReferenceResponseModel(
        id=self.id,
        created=self.created,
        updated=self.updated,
        commit=self.commit,
        subdirectory=self.subdirectory,
        code_repository=repository_model,
    )
CodeRepositorySchema (NamedSchema) pydantic-model

SQL Model for code repositories.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
class CodeRepositorySchema(NamedSchema, table=True):
    """SQL Model for code repositories."""

    __tablename__ = "code_repository"

    # Owning workspace; deleting the workspace cascades to its repositories.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(
        back_populates="code_repositories"
    )

    # Registering user; nullable so rows survive user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )

    user: Optional["UserSchema"] = Relationship(
        back_populates="code_repositories"
    )

    # Repository configuration and source, both stored as JSON strings
    # (serialized in `from_request()`, deserialized in `to_model()`).
    config: str = Field(sa_column=Column(TEXT, nullable=False))
    source: str = Field(sa_column=Column(TEXT, nullable=False))
    logo_url: Optional[str] = Field()
    description: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))

    @classmethod
    def from_request(
        cls,
        request: "CodeRepositoryRequestModel",
    ) -> "CodeRepositorySchema":
        """Convert a `CodeRepositoryRequestModel` to a `CodeRepositorySchema`.

        Args:
            request: The request model to convert.

        Returns:
            The converted schema.
        """
        return cls(
            name=request.name,
            workspace_id=request.workspace,
            user_id=request.user,
            config=json.dumps(request.config),
            source=request.source.json(),
            description=request.description,
            logo_url=request.logo_url,
        )

    def to_model(
        self,
    ) -> "CodeRepositoryResponseModel":
        """Convert a `CodeRepositorySchema` to a `CodeRepositoryResponseModel`.

        Returns:
            The created CodeRepositoryResponseModel.
        """
        return CodeRepositoryResponseModel(
            id=self.id,
            name=self.name,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(True) if self.user else None,
            created=self.created,
            updated=self.updated,
            config=json.loads(self.config),
            source=json.loads(self.source),
            description=self.description,
            logo_url=self.logo_url,
        )

    def update(
        self, update: "CodeRepositoryUpdateModel"
    ) -> "CodeRepositorySchema":
        """Update a `CodeRepositorySchema` with a `CodeRepositoryUpdateModel`.

        Args:
            update: The update model.

        Returns:
            The updated `CodeRepositorySchema`.
        """
        # Only truthy values overwrite existing fields; `None` or empty
        # strings in the update model leave the current values untouched.
        if update.name:
            self.name = update.name

        if update.description:
            self.description = update.description

        if update.logo_url:
            self.logo_url = update.logo_url

        self.updated = datetime.utcnow()
        return self
from_request(request) classmethod

Convert a CodeRepositoryRequestModel to a CodeRepositorySchema.

Parameters:

Name Type Description Default
request CodeRepositoryRequestModel

The request model to convert.

required

Returns:

Type Description
CodeRepositorySchema

The converted schema.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
@classmethod
def from_request(
    cls,
    request: "CodeRepositoryRequestModel",
) -> "CodeRepositorySchema":
    """Build a `CodeRepositorySchema` from a `CodeRepositoryRequestModel`.

    Args:
        request: The request model to convert.

    Returns:
        The converted schema.
    """
    # Config and source are persisted as JSON strings.
    serialized_config = json.dumps(request.config)
    serialized_source = request.source.json()
    return cls(
        name=request.name,
        workspace_id=request.workspace,
        user_id=request.user,
        config=serialized_config,
        source=serialized_source,
        description=request.description,
        logo_url=request.logo_url,
    )
to_model(self)

Convert a CodeRepositorySchema to a CodeRepositoryResponseModel.

Returns:

Type Description
CodeRepositoryResponseModel

The created CodeRepositoryResponseModel.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
def to_model(
    self,
) -> "CodeRepositoryResponseModel":
    """Build a `CodeRepositoryResponseModel` from this schema.

    Returns:
        The created CodeRepositoryResponseModel.
    """
    owner = self.user.to_model(True) if self.user else None
    return CodeRepositoryResponseModel(
        id=self.id,
        name=self.name,
        workspace=self.workspace.to_model(),
        user=owner,
        created=self.created,
        updated=self.updated,
        config=json.loads(self.config),
        source=json.loads(self.source),
        description=self.description,
        logo_url=self.logo_url,
    )
update(self, update)

Update a CodeRepositorySchema with a CodeRepositoryUpdateModel.

Parameters:

Name Type Description Default
update CodeRepositoryUpdateModel

The update model.

required

Returns:

Type Description
CodeRepositorySchema

The updated CodeRepositorySchema.

Source code in zenml/zen_stores/schemas/code_repository_schemas.py
def update(
    self, update: "CodeRepositoryUpdateModel"
) -> "CodeRepositorySchema":
    """Apply a `CodeRepositoryUpdateModel` to this schema in place.

    Args:
        update: The update model.

    Returns:
        The updated `CodeRepositorySchema`.
    """
    # Only truthy values overwrite existing fields; `None` or empty
    # strings in the update model leave the current values untouched.
    for attr in ("name", "description", "logo_url"):
        new_value = getattr(update, attr)
        if new_value:
            setattr(self, attr, new_value)

    self.updated = datetime.utcnow()
    return self

component_schemas

SQL Model Implementations for Stack Components.

StackComponentSchema (ShareableSchema) pydantic-model

SQL Model for stack components.

Source code in zenml/zen_stores/schemas/component_schemas.py
class StackComponentSchema(ShareableSchema, table=True):
    """SQL Model for stack components."""

    __tablename__ = "stack_component"

    # Component type and flavor name.
    type: StackComponentType
    flavor: str
    # Component configuration: base64-encoded JSON bytes (see the
    # encode/decode round-trip in `update()` and `to_model()`).
    configuration: bytes
    # Optional user-defined labels, same base64-encoded JSON representation.
    labels: Optional[bytes]

    # Owning workspace; deleting the workspace cascades to its components.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="components")

    # Registering user; nullable so rows survive user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="components")

    # Many-to-many link to stacks via the stack composition table.
    stacks: List["StackSchema"] = Relationship(
        back_populates="components", link_model=StackCompositionSchema
    )
    schedules: List["ScheduleSchema"] = Relationship(
        back_populates="orchestrator",
    )

    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="stack_component",
    )

    run_or_step_logs: Optional["LogsSchema"] = Relationship(
        back_populates="artifact_store",
        sa_relationship_kwargs={"cascade": "delete", "uselist": False},
    )

    # Optional service connector backing this component.
    connector_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ServiceConnectorSchema.__tablename__,
        source_column="connector_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    connector: Optional["ServiceConnectorSchema"] = Relationship(
        back_populates="components"
    )

    connector_resource_id: Optional[str]

    def update(
        self, component_update: ComponentUpdateModel
    ) -> "StackComponentSchema":
        """Updates a `StackComponentSchema` from a `ComponentUpdateModel`.

        Args:
            component_update: The `ComponentUpdateModel` to update from.

        Returns:
            The updated `StackComponentSchema`.
        """
        # Workspace/user/connector are relationship fields handled elsewhere;
        # configuration and labels need re-encoding to base64 JSON.
        for field, value in component_update.dict(
            exclude_unset=True, exclude={"workspace", "user", "connector"}
        ).items():
            if field == "configuration":
                self.configuration = base64.b64encode(
                    json.dumps(component_update.configuration).encode("utf-8")
                )
            elif field == "labels":
                self.labels = base64.b64encode(
                    json.dumps(component_update.labels).encode("utf-8")
                )
            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(
        self,
    ) -> "ComponentResponseModel":
        """Creates a `ComponentModel` from an instance of a `StackComponentSchema`.

        Returns:
            A `ComponentModel`
        """
        return ComponentResponseModel(
            id=self.id,
            name=self.name,
            type=self.type,
            flavor=self.flavor,
            user=self.user.to_model(True) if self.user else None,
            workspace=self.workspace.to_model(),
            connector=self.connector.to_model() if self.connector else None,
            connector_resource_id=self.connector_resource_id,
            is_shared=self.is_shared,
            configuration=json.loads(
                base64.b64decode(self.configuration).decode()
            ),
            labels=json.loads(base64.b64decode(self.labels).decode())
            if self.labels
            else None,
            created=self.created,
            updated=self.updated,
        )
to_model(self)

Creates a ComponentModel from an instance of a StackComponentSchema.

Returns:

Type Description
ComponentResponseModel

A ComponentModel

Source code in zenml/zen_stores/schemas/component_schemas.py
def to_model(
    self,
) -> "ComponentResponseModel":
    """Build a `ComponentModel` from this stack component schema.

    Returns:
        A `ComponentModel`
    """
    # Configuration and labels are stored as base64-encoded JSON blobs.
    configuration = json.loads(base64.b64decode(self.configuration).decode())
    labels = None
    if self.labels:
        labels = json.loads(base64.b64decode(self.labels).decode())

    return ComponentResponseModel(
        id=self.id,
        name=self.name,
        type=self.type,
        flavor=self.flavor,
        user=self.user.to_model(True) if self.user else None,
        workspace=self.workspace.to_model(),
        connector=self.connector.to_model() if self.connector else None,
        connector_resource_id=self.connector_resource_id,
        is_shared=self.is_shared,
        configuration=configuration,
        labels=labels,
        created=self.created,
        updated=self.updated,
    )
update(self, component_update)

Updates a StackComponentSchema from a ComponentUpdateModel.

Parameters:

Name Type Description Default
component_update ComponentUpdateModel

The ComponentUpdateModel to update from.

required

Returns:

Type Description
StackComponentSchema

The updated StackComponentSchema.

Source code in zenml/zen_stores/schemas/component_schemas.py
def update(
    self, component_update: ComponentUpdateModel
) -> "StackComponentSchema":
    """Apply a `ComponentUpdateModel` to this stack component schema.

    Args:
        component_update: The `ComponentUpdateModel` to update from.

    Returns:
        The updated `StackComponentSchema`.
    """

    def _encode(value):
        # Configuration and labels are persisted as base64-encoded JSON.
        return base64.b64encode(json.dumps(value).encode("utf-8"))

    # Workspace/user/connector are relationship fields handled elsewhere.
    updates = component_update.dict(
        exclude_unset=True, exclude={"workspace", "user", "connector"}
    )
    for field, value in updates.items():
        if field == "configuration":
            self.configuration = _encode(component_update.configuration)
        elif field == "labels":
            self.labels = _encode(component_update.labels)
        else:
            setattr(self, field, value)

    self.updated = datetime.utcnow()
    return self

flavor_schemas

SQL Model Implementations for Flavors.

FlavorSchema (NamedSchema) pydantic-model

SQL Model for flavors.

Attributes:

Name Type Description
type StackComponentType

The type of the flavor.

source str

The source of the flavor.

config_schema str

The config schema of the flavor.

integration Optional[str]

The integration associated with the flavor.

Source code in zenml/zen_stores/schemas/flavor_schemas.py
class FlavorSchema(NamedSchema, table=True):
    """SQL Model for flavors.

    Attributes:
        type: The type of the flavor.
        source: The source of the flavor.
        config_schema: The config schema of the flavor.
        integration: The integration associated with the flavor.
    """

    __tablename__ = "flavor"

    type: StackComponentType
    source: str
    # JSON string; serialized in `update()` and parsed in `to_model()`.
    config_schema: str = Field(sa_column=Column(TEXT, nullable=False))
    integration: Optional[str] = Field(default="")
    # Optional service-connector association for this flavor
    # (passed through verbatim to the response model).
    connector_type: Optional[str]
    connector_resource_type: Optional[str]
    connector_resource_id_attr: Optional[str]

    # Owning workspace; nullable (builtin flavors have no workspace —
    # NOTE(review): inferred from nullability, confirm).
    workspace_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    workspace: Optional["WorkspaceSchema"] = Relationship(
        back_populates="flavors"
    )

    # Registering user; nullable so rows survive user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="flavors")

    # Optional display/documentation links for this flavor.
    logo_url: Optional[str] = Field()

    docs_url: Optional[str] = Field()

    sdk_docs_url: Optional[str] = Field()

    # Whether this is a user-defined (custom) flavor; defaults to True.
    is_custom: bool = Field(default=True)

    def update(self, flavor_update: FlavorUpdateModel) -> "FlavorSchema":
        """Update a `FlavorSchema` from a `FlavorUpdateModel`.

        Args:
            flavor_update: The `FlavorUpdateModel` from which to update the schema.

        Returns:
            The updated `FlavorSchema`.
        """
        # The config schema is stored serialized; everything else is copied.
        for field, value in flavor_update.dict(exclude_unset=True).items():
            if field == "config_schema":
                setattr(self, field, json.dumps(value))
            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(self) -> FlavorResponseModel:
        """Converts a flavor schema to a flavor model.

        Returns:
            The flavor model.
        """
        return FlavorResponseModel(
            id=self.id,
            name=self.name,
            type=self.type,
            source=self.source,
            config_schema=json.loads(self.config_schema),
            integration=self.integration,
            connector_type=self.connector_type,
            connector_resource_type=self.connector_resource_type,
            connector_resource_id_attr=self.connector_resource_id_attr,
            user=self.user.to_model() if self.user else None,
            workspace=self.workspace.to_model() if self.workspace else None,
            created=self.created,
            updated=self.updated,
            logo_url=self.logo_url,
            docs_url=self.docs_url,
            sdk_docs_url=self.sdk_docs_url,
            is_custom=self.is_custom,
        )
to_model(self)

Converts a flavor schema to a flavor model.

Returns:

Type Description
FlavorResponseModel

The flavor model.

Source code in zenml/zen_stores/schemas/flavor_schemas.py
def to_model(self) -> FlavorResponseModel:
    """Build a `FlavorResponseModel` from this flavor schema.

    Returns:
        The flavor model.
    """
    model_kwargs = dict(
        id=self.id,
        name=self.name,
        type=self.type,
        source=self.source,
        # Config schema is stored as a JSON string.
        config_schema=json.loads(self.config_schema),
        integration=self.integration,
        connector_type=self.connector_type,
        connector_resource_type=self.connector_resource_type,
        connector_resource_id_attr=self.connector_resource_id_attr,
        user=self.user.to_model() if self.user else None,
        workspace=self.workspace.to_model() if self.workspace else None,
        created=self.created,
        updated=self.updated,
        logo_url=self.logo_url,
        docs_url=self.docs_url,
        sdk_docs_url=self.sdk_docs_url,
        is_custom=self.is_custom,
    )
    return FlavorResponseModel(**model_kwargs)
update(self, flavor_update)

Update a FlavorSchema from a FlavorUpdateModel.

Parameters:

Name Type Description Default
flavor_update FlavorUpdateModel

The FlavorUpdateModel from which to update the schema.

required

Returns:

Type Description
FlavorSchema

The updated FlavorSchema.

Source code in zenml/zen_stores/schemas/flavor_schemas.py
def update(self, flavor_update: FlavorUpdateModel) -> "FlavorSchema":
    """Apply a `FlavorUpdateModel` to this flavor schema.

    Args:
        flavor_update: The `FlavorUpdateModel` from which to update the schema.

    Returns:
        The updated `FlavorSchema`.
    """
    for field, value in flavor_update.dict(exclude_unset=True).items():
        # The config schema is persisted as a JSON string.
        serialized = json.dumps(value) if field == "config_schema" else value
        setattr(self, field, serialized)

    self.updated = datetime.utcnow()
    return self

identity_schemas

SQLModel implementation for the server information table.

IdentitySchema (SQLModel) pydantic-model

SQL Model for the client/server identity.

Source code in zenml/zen_stores/schemas/identity_schemas.py
class IdentitySchema(SQLModel, table=True):
    """SQL Model for the client/server identity."""

    __tablename__ = "identity"

    # Unique ID identifying this client/server installation.
    id: UUID = Field(primary_key=True)

logs_schemas

SQLModel implementation of pipeline logs tables.

LogsSchema (BaseSchema) pydantic-model

SQL Model for logs.

Source code in zenml/zen_stores/schemas/logs_schemas.py
class LogsSchema(BaseSchema, table=True):
    """SQL Model for logs."""

    __tablename__ = "logs"

    # Owning pipeline run; nullable — presumably a row belongs to either a
    # pipeline run or a step run, not both (confirm with callers).
    pipeline_run_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineRunSchema.__tablename__,
        source_column="pipeline_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    pipeline_run: Optional["PipelineRunSchema"] = Relationship(
        back_populates="logs"
    )

    # Owning step run; see note above.
    step_run_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="step_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    step_run: Optional["StepRunSchema"] = Relationship(back_populates="logs")

    # Artifact store holding the log files. NOTE: the underlying DB column
    # is named "stack_component_id" even though the attribute is
    # `artifact_store_id`.
    artifact_store_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StackComponentSchema.__tablename__,
        source_column="stack_component_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    artifact_store: Optional["StackComponentSchema"] = Relationship(
        back_populates="run_or_step_logs"
    )

    # Location of the log data; required TEXT column.
    uri: str = Field(sa_column=Column(TEXT, nullable=False))

    def to_model(self) -> "LogsResponseModel":
        """Convert a `LogsSchema` to a `LogsResponseModel`.

        Returns:
            The created `LogsModel`.
        """
        return LogsResponseModel(
            id=self.id,
            pipeline_run_id=self.pipeline_run_id,
            step_run_id=self.step_run_id,
            artifact_store_id=self.artifact_store_id,
            uri=self.uri,
            created=self.created,
            updated=self.updated,
        )
to_model(self)

Convert a LogsSchema to a LogsResponseModel.

Returns:

Type Description
LogsResponseModel

The created LogsModel.

Source code in zenml/zen_stores/schemas/logs_schemas.py
def to_model(self) -> "LogsResponseModel":
    """Build a `LogsResponseModel` from this `LogsSchema`.

    Returns:
        The created `LogsModel`.
    """
    return LogsResponseModel(
        id=self.id,
        created=self.created,
        updated=self.updated,
        pipeline_run_id=self.pipeline_run_id,
        step_run_id=self.step_run_id,
        artifact_store_id=self.artifact_store_id,
        uri=self.uri,
    )

pipeline_build_schemas

SQLModel implementation of pipeline build tables.

PipelineBuildSchema (BaseSchema) pydantic-model

SQL Model for pipeline builds.

Source code in zenml/zen_stores/schemas/pipeline_build_schemas.py
class PipelineBuildSchema(BaseSchema, table=True):
    """SQL Model for pipeline builds."""

    __tablename__ = "pipeline_build"

    # Triggering user; nullable so rows survive user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="builds")

    # Owning workspace; deleting the workspace cascades to its builds.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="builds")

    # Stack the build was created for; kept nullable via SET NULL.
    stack_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackSchema.__tablename__,
        source_column="stack_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    stack: Optional["StackSchema"] = Relationship(back_populates="builds")

    # Pipeline the build was created for; kept nullable via SET NULL.
    pipeline_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineSchema.__tablename__,
        source_column="pipeline_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    pipeline: Optional["PipelineSchema"] = Relationship(
        back_populates="builds"
    )

    runs: List["PipelineRunSchema"] = Relationship(back_populates="build")
    deployments: List["PipelineDeploymentSchema"] = Relationship(
        back_populates="build",
    )

    # Image specs serialized as one JSON string; uses the MEDIUMTEXT column
    # type on MySQL to allow large payloads.
    images: str = Field(
        sa_column=Column(
            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
                MEDIUMTEXT, "mysql"
            ),
            nullable=False,
        )
    )

    is_local: bool
    contains_code: bool

    # Build environment metadata; all optional.
    zenml_version: Optional[str]
    python_version: Optional[str]
    checksum: Optional[str]

    @classmethod
    def from_request(
        cls, request: PipelineBuildRequestModel
    ) -> "PipelineBuildSchema":
        """Convert a `PipelineBuildRequestModel` to a `PipelineBuildSchema`.

        Args:
            request: The request to convert.

        Returns:
            The created `PipelineBuildSchema`.
        """
        return cls(
            stack_id=request.stack,
            workspace_id=request.workspace,
            user_id=request.user,
            pipeline_id=request.pipeline,
            images=json.dumps(request.images, default=pydantic_encoder),
            is_local=request.is_local,
            contains_code=request.contains_code,
            zenml_version=request.zenml_version,
            python_version=request.python_version,
            checksum=request.checksum,
        )

    def to_model(
        self,
    ) -> PipelineBuildResponseModel:
        """Convert a `PipelineBuildSchema` to a `PipelineBuildResponseModel`.

        Returns:
            The created `PipelineBuildResponseModel`.
        """
        return PipelineBuildResponseModel(
            id=self.id,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(True) if self.user else None,
            stack=self.stack.to_model() if self.stack else None,
            pipeline=self.pipeline.to_model() if self.pipeline else None,
            created=self.created,
            updated=self.updated,
            images=json.loads(self.images),
            is_local=self.is_local,
            contains_code=self.contains_code,
            zenml_version=self.zenml_version,
            python_version=self.python_version,
            checksum=self.checksum,
        )
from_request(request) classmethod

Convert a PipelineBuildRequestModel to a PipelineBuildSchema.

Parameters:

Name Type Description Default
request PipelineBuildRequestModel

The request to convert.

required

Returns:

Type Description
PipelineBuildSchema

The created PipelineBuildSchema.

Source code in zenml/zen_stores/schemas/pipeline_build_schemas.py
@classmethod
def from_request(
    cls, request: PipelineBuildRequestModel
) -> "PipelineBuildSchema":
    """Convert a `PipelineBuildRequestModel` to a `PipelineBuildSchema`.

    Args:
        request: The request to convert.

    Returns:
        The created `PipelineBuildSchema`.
    """
    return cls(
        stack_id=request.stack,
        workspace_id=request.workspace,
        user_id=request.user,
        pipeline_id=request.pipeline,
        images=json.dumps(request.images, default=pydantic_encoder),
        is_local=request.is_local,
        contains_code=request.contains_code,
        zenml_version=request.zenml_version,
        python_version=request.python_version,
        checksum=request.checksum,
    )
to_model(self)

Convert a PipelineBuildSchema to a PipelineBuildResponseModel.

Returns:

Type Description
PipelineBuildResponseModel

The created PipelineBuildResponseModel.

Source code in zenml/zen_stores/schemas/pipeline_build_schemas.py
def to_model(
    self,
) -> PipelineBuildResponseModel:
    """Convert a `PipelineBuildSchema` to a `PipelineBuildResponseModel`.

    Returns:
        The created `PipelineBuildResponseModel`.
    """
    return PipelineBuildResponseModel(
        id=self.id,
        workspace=self.workspace.to_model(),
        user=self.user.to_model(True) if self.user else None,
        stack=self.stack.to_model() if self.stack else None,
        pipeline=self.pipeline.to_model() if self.pipeline else None,
        created=self.created,
        updated=self.updated,
        images=json.loads(self.images),
        is_local=self.is_local,
        contains_code=self.contains_code,
        zenml_version=self.zenml_version,
        python_version=self.python_version,
        checksum=self.checksum,
    )

pipeline_deployment_schemas

SQLModel implementation of pipeline deployment tables.

PipelineDeploymentSchema (BaseSchema) pydantic-model

SQL Model for pipeline deployments.

Source code in zenml/zen_stores/schemas/pipeline_deployment_schemas.py
class PipelineDeploymentSchema(BaseSchema, table=True):
    """SQL Model for pipeline deployments.

    A deployment captures everything needed to (re)run a pipeline: the
    stack, pipeline, build, schedule, code reference and the serialized
    pipeline/step configurations.
    """

    __tablename__ = "pipeline_deployment"

    # All references below (except `workspace`) use `ondelete="SET NULL"`,
    # so the column — and therefore the relationship — can be empty after
    # the referenced row is deleted. The relationship annotations are
    # `Optional` to reflect that (the original non-Optional `stack` and
    # `pipeline` annotations contradicted the nullable FK and the
    # `if self.stack else None` guards in `to_model`).
    stack_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackSchema.__tablename__,
        source_column="stack_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    stack: Optional["StackSchema"] = Relationship(
        back_populates="deployments"
    )

    pipeline_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineSchema.__tablename__,
        source_column="pipeline_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    pipeline: Optional["PipelineSchema"] = Relationship(
        back_populates="deployments"
    )

    build_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineBuildSchema.__tablename__,
        source_column="build_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    build: Optional["PipelineBuildSchema"] = Relationship(
        back_populates="deployments"
    )

    schedule_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ScheduleSchema.__tablename__,
        source_column="schedule_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    schedule: Optional[ScheduleSchema] = Relationship(
        back_populates="deployment"
    )

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="deployments")

    # Deployments are owned by their workspace and are deleted with it.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="deployments")

    code_reference_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=CodeReferenceSchema.__tablename__,
        source_column="code_reference_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    code_reference: Optional["CodeReferenceSchema"] = Relationship()

    runs: List["PipelineRunSchema"] = Relationship(back_populates="deployment")

    run_name_template: str
    # JSON-serialized `PipelineConfiguration`.
    pipeline_configuration: str = Field(sa_column=Column(TEXT, nullable=False))
    # JSON-serialized step configurations; stored as MEDIUMTEXT on MySQL
    # because the payload can exceed the regular TEXT limit.
    step_configurations: str = Field(
        sa_column=Column(
            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
                MEDIUMTEXT, "mysql"
            ),
            nullable=False,
        )
    )
    # JSON-serialized client environment metadata.
    client_environment: str = Field(sa_column=Column(TEXT, nullable=False))

    @classmethod
    def from_request(
        cls,
        request: PipelineDeploymentRequestModel,
        code_reference_id: Optional[UUID],
    ) -> "PipelineDeploymentSchema":
        """Convert a `PipelineDeploymentRequestModel` to a `PipelineDeploymentSchema`.

        Args:
            request: The request to convert.
            code_reference_id: Optional ID of the code reference for the
                deployment.

        Returns:
            The created `PipelineDeploymentSchema`.
        """
        return cls(
            stack_id=request.stack,
            workspace_id=request.workspace,
            pipeline_id=request.pipeline,
            build_id=request.build,
            user_id=request.user,
            schedule_id=request.schedule,
            code_reference_id=code_reference_id,
            run_name_template=request.run_name_template,
            # Serialize nested Pydantic models/dicts into the TEXT columns.
            pipeline_configuration=request.pipeline_configuration.json(),
            step_configurations=json.dumps(
                request.step_configurations,
                sort_keys=False,
                default=pydantic_encoder,
            ),
            client_environment=json.dumps(request.client_environment),
        )

    def to_model(
        self,
    ) -> PipelineDeploymentResponseModel:
        """Convert a `PipelineDeploymentSchema` to a `PipelineDeploymentResponseModel`.

        Returns:
            The created `PipelineDeploymentResponseModel`.
        """
        return PipelineDeploymentResponseModel(
            id=self.id,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(True) if self.user else None,
            stack=self.stack.to_model() if self.stack else None,
            pipeline=self.pipeline.to_model() if self.pipeline else None,
            build=self.build.to_model() if self.build else None,
            schedule=self.schedule.to_model() if self.schedule else None,
            code_reference=self.code_reference.to_model()
            if self.code_reference
            else None,
            created=self.created,
            updated=self.updated,
            run_name_template=self.run_name_template,
            # Deserialize the stored JSON back into typed objects.
            pipeline_configuration=PipelineConfiguration.parse_raw(
                self.pipeline_configuration
            ),
            step_configurations=json.loads(self.step_configurations),
            client_environment=json.loads(self.client_environment),
        )
from_request(request, code_reference_id) classmethod

Convert a PipelineDeploymentRequestModel to a PipelineDeploymentSchema.

Parameters:

Name Type Description Default
request PipelineDeploymentRequestModel

The request to convert.

required
code_reference_id Optional[uuid.UUID]

Optional ID of the code reference for the deployment.

required

Returns:

Type Description
PipelineDeploymentSchema

The created PipelineDeploymentSchema.

Source code in zenml/zen_stores/schemas/pipeline_deployment_schemas.py
@classmethod
def from_request(
    cls,
    request: PipelineDeploymentRequestModel,
    code_reference_id: Optional[UUID],
) -> "PipelineDeploymentSchema":
    """Convert a `PipelineDeploymentRequestModel` to a `PipelineDeploymentSchema`.

    Args:
        request: The request to convert.
        code_reference_id: Optional ID of the code reference for the
            deployment.

    Returns:
        The created `PipelineDeploymentSchema`.
    """
    return cls(
        stack_id=request.stack,
        workspace_id=request.workspace,
        pipeline_id=request.pipeline,
        build_id=request.build,
        user_id=request.user,
        schedule_id=request.schedule,
        code_reference_id=code_reference_id,
        run_name_template=request.run_name_template,
        pipeline_configuration=request.pipeline_configuration.json(),
        step_configurations=json.dumps(
            request.step_configurations,
            sort_keys=False,
            default=pydantic_encoder,
        ),
        client_environment=json.dumps(request.client_environment),
    )
to_model(self)

Convert a PipelineDeploymentSchema to a PipelineDeploymentResponseModel.

Returns:

Type Description
PipelineDeploymentResponseModel

The created PipelineDeploymentResponseModel.

Source code in zenml/zen_stores/schemas/pipeline_deployment_schemas.py
def to_model(
    self,
) -> PipelineDeploymentResponseModel:
    """Convert a `PipelineDeploymentSchema` to a `PipelineDeploymentResponseModel`.

    Returns:
        The created `PipelineDeploymentResponseModel`.
    """
    return PipelineDeploymentResponseModel(
        id=self.id,
        workspace=self.workspace.to_model(),
        user=self.user.to_model(True) if self.user else None,
        stack=self.stack.to_model() if self.stack else None,
        pipeline=self.pipeline.to_model() if self.pipeline else None,
        build=self.build.to_model() if self.build else None,
        schedule=self.schedule.to_model() if self.schedule else None,
        code_reference=self.code_reference.to_model()
        if self.code_reference
        else None,
        created=self.created,
        updated=self.updated,
        run_name_template=self.run_name_template,
        pipeline_configuration=PipelineConfiguration.parse_raw(
            self.pipeline_configuration
        ),
        step_configurations=json.loads(self.step_configurations),
        client_environment=json.loads(self.client_environment),
    )

pipeline_run_schemas

SQLModel implementation of pipeline run tables.

PipelineRunSchema (NamedSchema) pydantic-model

SQL Model for pipeline runs.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
class PipelineRunSchema(NamedSchema, table=True):
    """SQL Model for pipeline runs."""

    __tablename__ = "pipeline_run"

    # References with `ondelete="SET NULL"` can be emptied after the target
    # row is deleted, so both the FK column and the relationship are
    # `Optional`.
    stack_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackSchema.__tablename__,
        source_column="stack_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    stack: Optional["StackSchema"] = Relationship(back_populates="runs")

    pipeline_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineSchema.__tablename__,
        source_column="pipeline_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    pipeline: Optional["PipelineSchema"] = Relationship(back_populates="runs")

    build_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineBuildSchema.__tablename__,
        source_column="build_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    build: Optional["PipelineBuildSchema"] = Relationship(
        back_populates="runs"
    )

    # FIX: annotated `Optional[UUID]` — the FK is declared `nullable=True`
    # with `ondelete="SET NULL"` and both the relationship below and
    # `to_model` treat the deployment as optional, so the previous plain
    # `UUID` annotation contradicted the column definition.
    deployment_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineDeploymentSchema.__tablename__,
        source_column="deployment_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    deployment: Optional["PipelineDeploymentSchema"] = Relationship(
        back_populates="runs"
    )

    schedule_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ScheduleSchema.__tablename__,
        source_column="schedule_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    schedule: Optional[ScheduleSchema] = Relationship(back_populates="runs")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="runs")

    # Runs are owned by their workspace and are deleted with it.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="runs")

    # ID assigned to the run by the orchestrator, if any.
    orchestrator_run_id: Optional[str] = Field(nullable=True)

    enable_cache: Optional[bool] = Field(nullable=True)
    enable_artifact_metadata: Optional[bool] = Field(nullable=True)
    start_time: Optional[datetime] = Field(nullable=True)
    end_time: Optional[datetime] = Field(nullable=True, default=None)
    status: ExecutionStatus
    # JSON-serialized `PipelineConfiguration`.
    pipeline_configuration: str = Field(sa_column=Column(TEXT, nullable=False))
    num_steps: Optional[int]
    client_version: str
    server_version: Optional[str] = Field(nullable=True)
    # JSON-serialized environment dictionaries.
    client_environment: Optional[str] = Field(
        sa_column=Column(TEXT, nullable=True)
    )
    orchestrator_environment: Optional[str] = Field(
        sa_column=Column(TEXT, nullable=True)
    )
    git_sha: Optional[str] = Field(nullable=True, default=None)  # DEPRECATED

    # Child rows are deleted together with the run.
    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="pipeline_run",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    step_runs: List["StepRunSchema"] = Relationship(
        back_populates="pipeline_run",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    logs: Optional["LogsSchema"] = Relationship(
        back_populates="pipeline_run",
        sa_relationship_kwargs={"cascade": "delete", "uselist": False},
    )

    @classmethod
    def from_request(
        cls, request: PipelineRunRequestModel
    ) -> "PipelineRunSchema":
        """Convert a `PipelineRunRequestModel` to a `PipelineRunSchema`.

        Args:
            request: The request to convert.

        Returns:
            The created `PipelineRunSchema`.
        """
        config = request.config
        # Environments are stored as JSON strings in TEXT columns.
        client_environment = json.dumps(request.client_environment)
        orchestrator_environment = json.dumps(request.orchestrator_environment)

        return cls(
            id=request.id,
            name=request.name,
            orchestrator_run_id=request.orchestrator_run_id,
            stack_id=request.stack,
            workspace_id=request.workspace,
            user_id=request.user,
            pipeline_id=request.pipeline,
            build_id=request.build,
            deployment_id=request.deployment,
            schedule_id=request.schedule_id,
            enable_cache=config.enable_cache,
            enable_artifact_metadata=config.enable_artifact_metadata,
            start_time=request.start_time,
            status=request.status,
            # Sorted keys keep the serialized config stable across runs.
            pipeline_configuration=config.json(sort_keys=True),
            num_steps=request.num_steps,
            client_version=request.client_version,
            server_version=request.server_version,
            client_environment=client_environment,
            orchestrator_environment=orchestrator_environment,
        )

    def to_model(
        self,
        steps: Optional[Dict[str, "StepRunResponseModel"]] = None,
    ) -> PipelineRunResponseModel:
        """Convert a `PipelineRunSchema` to a `PipelineRunResponseModel`.

        Args:
            steps: The steps to include in the response.

        Returns:
            The created `PipelineRunResponseModel`.
        """
        # Missing environment columns deserialize to empty dicts.
        client_environment = (
            json.loads(self.client_environment)
            if self.client_environment
            else {}
        )
        orchestrator_environment = (
            json.loads(self.orchestrator_environment)
            if self.orchestrator_environment
            else {}
        )
        metadata = {
            metadata_schema.key: metadata_schema.to_model()
            for metadata_schema in self.run_metadata
        }
        config = PipelineConfiguration.parse_raw(self.pipeline_configuration)

        pipeline = self.pipeline.to_model() if self.pipeline else None
        build = self.build.to_model() if self.build else None
        deployment = self.deployment.to_model() if self.deployment else None
        steps = steps or {}

        return PipelineRunResponseModel(
            id=self.id,
            name=self.name,
            stack=self.stack.to_model() if self.stack else None,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(True) if self.user else None,
            orchestrator_run_id=self.orchestrator_run_id,
            start_time=self.start_time,
            end_time=self.end_time,
            status=self.status,
            pipeline=pipeline,
            build=build,
            deployment=deployment,
            schedule_id=self.schedule_id,
            config=config,
            num_steps=self.num_steps,
            client_version=self.client_version,
            server_version=self.server_version,
            client_environment=client_environment,
            orchestrator_environment=orchestrator_environment,
            created=self.created,
            updated=self.updated,
            metadata=metadata,
            steps=steps,
        )

    def update(
        self, run_update: "PipelineRunUpdateModel"
    ) -> "PipelineRunSchema":
        """Update a `PipelineRunSchema` with a `PipelineRunUpdateModel`.

        Args:
            run_update: The `PipelineRunUpdateModel` to update with.

        Returns:
            The updated `PipelineRunSchema`.
        """
        # `end_time` is only meaningful alongside a status change, so both
        # are applied together.
        if run_update.status:
            self.status = run_update.status
            self.end_time = run_update.end_time

        self.updated = datetime.utcnow()
        return self
from_request(request) classmethod

Convert a PipelineRunRequestModel to a PipelineRunSchema.

Parameters:

Name Type Description Default
request PipelineRunRequestModel

The request to convert.

required

Returns:

Type Description
PipelineRunSchema

The created PipelineRunSchema.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
@classmethod
def from_request(
    cls, request: PipelineRunRequestModel
) -> "PipelineRunSchema":
    """Convert a `PipelineRunRequestModel` to a `PipelineRunSchema`.

    Args:
        request: The request to convert.

    Returns:
        The created `PipelineRunSchema`.
    """
    config = request.config
    client_environment = json.dumps(request.client_environment)
    orchestrator_environment = json.dumps(request.orchestrator_environment)

    return cls(
        id=request.id,
        name=request.name,
        orchestrator_run_id=request.orchestrator_run_id,
        stack_id=request.stack,
        workspace_id=request.workspace,
        user_id=request.user,
        pipeline_id=request.pipeline,
        build_id=request.build,
        deployment_id=request.deployment,
        schedule_id=request.schedule_id,
        enable_cache=config.enable_cache,
        enable_artifact_metadata=config.enable_artifact_metadata,
        start_time=request.start_time,
        status=request.status,
        pipeline_configuration=config.json(sort_keys=True),
        num_steps=request.num_steps,
        client_version=request.client_version,
        server_version=request.server_version,
        client_environment=client_environment,
        orchestrator_environment=orchestrator_environment,
    )
to_model(self, steps=None)

Convert a PipelineRunSchema to a PipelineRunResponseModel.

Parameters:

Name Type Description Default
steps Optional[Dict[str, StepRunResponseModel]]

The steps to include in the response.

None

Returns:

Type Description
PipelineRunResponseModel

The created PipelineRunResponseModel.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
def to_model(
    self,
    steps: Optional[Dict[str, "StepRunResponseModel"]] = None,
) -> PipelineRunResponseModel:
    """Convert a `PipelineRunSchema` to a `PipelineRunResponseModel`.

    Args:
        steps: The steps to include in the response.

    Returns:
        The created `PipelineRunResponseModel`.
    """
    client_environment = (
        json.loads(self.client_environment)
        if self.client_environment
        else {}
    )
    orchestrator_environment = (
        json.loads(self.orchestrator_environment)
        if self.orchestrator_environment
        else {}
    )
    metadata = {
        metadata_schema.key: metadata_schema.to_model()
        for metadata_schema in self.run_metadata
    }
    config = PipelineConfiguration.parse_raw(self.pipeline_configuration)

    pipeline = self.pipeline.to_model() if self.pipeline else None
    build = self.build.to_model() if self.build else None
    deployment = self.deployment.to_model() if self.deployment else None
    steps = steps or {}

    return PipelineRunResponseModel(
        id=self.id,
        name=self.name,
        stack=self.stack.to_model() if self.stack else None,
        workspace=self.workspace.to_model(),
        user=self.user.to_model(True) if self.user else None,
        orchestrator_run_id=self.orchestrator_run_id,
        start_time=self.start_time,
        end_time=self.end_time,
        status=self.status,
        pipeline=pipeline,
        build=build,
        deployment=deployment,
        schedule_id=self.schedule_id,
        config=config,
        num_steps=self.num_steps,
        client_version=self.client_version,
        server_version=self.server_version,
        client_environment=client_environment,
        orchestrator_environment=orchestrator_environment,
        created=self.created,
        updated=self.updated,
        metadata=metadata,
        steps=steps,
    )
update(self, run_update)

Update a PipelineRunSchema with a PipelineRunUpdateModel.

Parameters:

Name Type Description Default
run_update PipelineRunUpdateModel

The PipelineRunUpdateModel to update with.

required

Returns:

Type Description
PipelineRunSchema

The updated PipelineRunSchema.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
def update(
    self, run_update: "PipelineRunUpdateModel"
) -> "PipelineRunSchema":
    """Update a `PipelineRunSchema` with a `PipelineRunUpdateModel`.

    Args:
        run_update: The `PipelineRunUpdateModel` to update with.

    Returns:
        The updated `PipelineRunSchema`.
    """
    if run_update.status:
        self.status = run_update.status
        self.end_time = run_update.end_time

    self.updated = datetime.utcnow()
    return self

pipeline_schemas

SQL Model Implementations for Pipelines and Pipeline Runs.

PipelineSchema (NamedSchema) pydantic-model

SQL Model for pipelines.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
class PipelineSchema(NamedSchema, table=True):
    """SQL Model for pipelines."""

    __tablename__ = "pipeline"

    version: str
    version_hash: str

    docstring: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    # JSON-serialized `PipelineSpec`.
    spec: str = Field(sa_column=Column(TEXT, nullable=False))

    # Pipelines are owned by their workspace and are deleted with it.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="pipelines")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )

    user: Optional["UserSchema"] = Relationship(back_populates="pipelines")

    schedules: List["ScheduleSchema"] = Relationship(
        back_populates="pipeline",
    )
    # Runs are deleted together with the pipeline; builds and deployments
    # are kept (no cascade).
    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="pipeline", sa_relationship_kwargs={"cascade": "delete"}
    )
    builds: List["PipelineBuildSchema"] = Relationship(
        back_populates="pipeline"
    )
    deployments: List["PipelineDeploymentSchema"] = Relationship(
        back_populates="pipeline"
    )

    @classmethod
    def from_request(
        cls,
        pipeline_request: "PipelineRequestModel",
    ) -> "PipelineSchema":
        """Convert a `PipelineRequestModel` to a `PipelineSchema`.

        Args:
            pipeline_request: The request model to convert.

        Returns:
            The converted schema.
        """
        return cls(
            name=pipeline_request.name,
            version=pipeline_request.version,
            version_hash=pipeline_request.version_hash,
            workspace_id=pipeline_request.workspace,
            user_id=pipeline_request.user,
            docstring=pipeline_request.docstring,
            # Sorted keys keep the serialized spec stable for hashing/diffs.
            spec=pipeline_request.spec.json(sort_keys=True),
        )

    def to_model(
        self,
        last_x_runs: int = 3,
    ) -> "PipelineResponseModel":
        """Convert a `PipelineSchema` to a `PipelineResponseModel`.

        Args:
            last_x_runs: How many runs to use for the execution status

        Returns:
            The created `PipelineResponseModel`.
        """
        return PipelineResponseModel(
            id=self.id,
            name=self.name,
            version=self.version,
            version_hash=self.version_hash,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(True) if self.user else None,
            docstring=self.docstring,
            spec=PipelineSpec.parse_raw(self.spec),
            created=self.created,
            updated=self.updated,
            # NOTE(review): takes the first `last_x_runs` entries of
            # `self.runs` — presumably ordered by the relationship's default
            # ordering; confirm this yields the most recent runs.
            status=[run.status for run in self.runs[:last_x_runs]],
        )

    def update(
        self, pipeline_update: "PipelineUpdateModel"
    ) -> "PipelineSchema":
        """Update a `PipelineSchema` with a `PipelineUpdateModel`.

        Args:
            pipeline_update: The update model.

        Returns:
            The updated `PipelineSchema`.
        """
        # Only truthy fields are applied; empty strings/None leave the
        # existing values untouched.
        if pipeline_update.name:
            self.name = pipeline_update.name

        if pipeline_update.docstring:
            self.docstring = pipeline_update.docstring

        if pipeline_update.spec:
            self.spec = pipeline_update.spec.json(sort_keys=True)

        self.updated = datetime.utcnow()
        return self
from_request(pipeline_request) classmethod

Convert a PipelineRequestModel to a PipelineSchema.

Parameters:

Name Type Description Default
pipeline_request PipelineRequestModel

The request model to convert.

required

Returns:

Type Description
PipelineSchema

The converted schema.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
@classmethod
def from_request(
    cls,
    pipeline_request: "PipelineRequestModel",
) -> "PipelineSchema":
    """Convert a `PipelineRequestModel` to a `PipelineSchema`.

    Args:
        pipeline_request: The request model to convert.

    Returns:
        The converted schema.
    """
    return cls(
        name=pipeline_request.name,
        version=pipeline_request.version,
        version_hash=pipeline_request.version_hash,
        workspace_id=pipeline_request.workspace,
        user_id=pipeline_request.user,
        docstring=pipeline_request.docstring,
        spec=pipeline_request.spec.json(sort_keys=True),
    )
to_model(self, last_x_runs=3)

Convert a PipelineSchema to a PipelineResponseModel.

Parameters:

Name Type Description Default
last_x_runs int

How many runs to use for the execution status

3

Returns:

Type Description
PipelineResponseModel

The created PipelineModel.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
def to_model(
    self,
    last_x_runs: int = 3,
) -> "PipelineResponseModel":
    """Convert a `PipelineSchema` to a `PipelineModel`.

    Args:
        last_x_runs: How many runs to use for the execution status

    Returns:
        The created PipelineModel.
    """
    return PipelineResponseModel(
        id=self.id,
        name=self.name,
        version=self.version,
        version_hash=self.version_hash,
        workspace=self.workspace.to_model(),
        user=self.user.to_model(True) if self.user else None,
        docstring=self.docstring,
        spec=PipelineSpec.parse_raw(self.spec),
        created=self.created,
        updated=self.updated,
        status=[run.status for run in self.runs[:last_x_runs]],
    )
update(self, pipeline_update)

Update a PipelineSchema with a PipelineUpdateModel.

Parameters:

Name Type Description Default
pipeline_update PipelineUpdateModel

The update model.

required

Returns:

Type Description
PipelineSchema

The updated PipelineSchema.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
def update(
    self, pipeline_update: "PipelineUpdateModel"
) -> "PipelineSchema":
    """Update a `PipelineSchema` with a `PipelineUpdateModel`.

    Args:
        pipeline_update: The update model.

    Returns:
        The updated `PipelineSchema`.
    """
    if pipeline_update.name:
        self.name = pipeline_update.name

    if pipeline_update.docstring:
        self.docstring = pipeline_update.docstring

    if pipeline_update.spec:
        self.spec = pipeline_update.spec.json(sort_keys=True)

    self.updated = datetime.utcnow()
    return self

role_schemas

SQLModel implementation of roles that can be assigned to users or teams.

RolePermissionSchema (SQLModel) pydantic-model

SQL Model for the permissions assigned to roles.

Source code in zenml/zen_stores/schemas/role_schemas.py
class RolePermissionSchema(SQLModel, table=True):
    """SQL Model for the permissions assigned to roles."""

    __tablename__ = "role_permission"

    # Composite primary key (permission name, role): a role cannot hold
    # the same permission twice.
    name: PermissionType = Field(primary_key=True)
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    roles: List["RoleSchema"] = Relationship(back_populates="permissions")
RoleSchema (NamedSchema) pydantic-model

SQL Model for roles.

Source code in zenml/zen_stores/schemas/role_schemas.py
class RoleSchema(NamedSchema, table=True):
    """SQL Model for roles."""

    __tablename__ = "role"

    # All related rows are deleted together with the role ("cascade": delete).
    permissions: List["RolePermissionSchema"] = Relationship(
        back_populates="roles", sa_relationship_kwargs={"cascade": "delete"}
    )
    user_role_assignments: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="role", sa_relationship_kwargs={"cascade": "delete"}
    )
    team_role_assignments: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="role", sa_relationship_kwargs={"cascade": "delete"}
    )

    @classmethod
    def from_request(cls, model: RoleRequestModel) -> "RoleSchema":
        """Create a `RoleSchema` from a `RoleRequestModel`.

        Args:
            model: The `RoleRequestModel` from which to create the schema.

        Returns:
            The created `RoleSchema`. Only the name is taken from the
            request; permissions are stored as `RolePermissionSchema` rows.
        """
        return cls(name=model.name)

    def update(self, role_update: RoleUpdateModel) -> "RoleSchema":
        """Update a `RoleSchema` from a `RoleUpdateModel`.

        Args:
            role_update: The `RoleUpdateModel` from which to update the
                schema. Only explicitly set fields are applied; permissions
                are excluded and handled via `RolePermissionSchema`.

        Returns:
            The updated `RoleSchema`.
        """
        for field, value in role_update.dict(
            exclude_unset=True, exclude={"permissions"}
        ).items():
            setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(self) -> RoleResponseModel:
        """Convert a `RoleSchema` to a `RoleResponseModel`.

        Returns:
            The converted `RoleResponseModel`, with the names of all linked
            `RolePermissionSchema` rows collected into a permission set.
        """
        return RoleResponseModel(
            id=self.id,
            name=self.name,
            created=self.created,
            updated=self.updated,
            permissions={PermissionType(p.name) for p in self.permissions},
        )
from_request(model) classmethod

Create a RoleSchema from a RoleRequestModel.

Parameters:

Name Type Description Default
model RoleRequestModel

The RoleRequestModel from which to create the schema.

required

Returns:

Type Description
RoleSchema

The created RoleSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(cls, model: RoleRequestModel) -> "RoleSchema":
    """Create a `RoleSchema` from a `RoleRequestModel`.

    Args:
        model: The `RoleRequestModel` from which to create the schema.

    Returns:
        The created `RoleSchema` (only the name is copied from the
        request).
    """
    return cls(name=model.name)
to_model(self)

Convert a RoleSchema to a RoleResponseModel.

Returns:

Type Description
RoleResponseModel

The converted RoleResponseModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> RoleResponseModel:
    """Build the `RoleResponseModel` representation of this role.

    Returns:
        The converted `RoleResponseModel`.
    """
    # Collect the permission names of all linked rows into a set.
    permission_set = {
        PermissionType(permission.name) for permission in self.permissions
    }
    return RoleResponseModel(
        id=self.id,
        name=self.name,
        created=self.created,
        updated=self.updated,
        permissions=permission_set,
    )
update(self, role_update)

Update a RoleSchema from a RoleUpdateModel.

Parameters:

Name Type Description Default
role_update RoleUpdateModel

The RoleUpdateModel from which to update the schema.

required

Returns:

Type Description
RoleSchema

The updated RoleSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
def update(self, role_update: RoleUpdateModel) -> "RoleSchema":
    """Apply the explicitly-set fields of a `RoleUpdateModel` to this schema.

    Args:
        role_update: The `RoleUpdateModel` from which to update the schema.

    Returns:
        The updated `RoleSchema`.
    """
    # Permissions are managed through separate schema rows, so skip them.
    changed = role_update.dict(exclude_unset=True, exclude={"permissions"})
    for field_name, field_value in changed.items():
        setattr(self, field_name, field_value)

    self.updated = datetime.utcnow()
    return self
TeamRoleAssignmentSchema (BaseSchema) pydantic-model

SQL Model for assigning roles to teams for a given workspace.

Source code in zenml/zen_stores/schemas/role_schemas.py
class TeamRoleAssignmentSchema(BaseSchema, table=True):
    """SQL Model for assigning roles to teams for a given workspace."""

    __tablename__ = "team_role_assignment"

    id: UUID = Field(primary_key=True, default_factory=uuid4)
    # The assigned role; the assignment row is deleted with the role.
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # The team receiving the role; deleted together with the team.
    team_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=TeamSchema.__tablename__,
        source_column="team_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Optional workspace scope; a NULL workspace presumably means an
    # unscoped (global) assignment — confirm against the store logic.
    workspace_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    role: RoleSchema = Relationship(back_populates="team_role_assignments")
    team: "TeamSchema" = Relationship(back_populates="assigned_roles")
    workspace: Optional["WorkspaceSchema"] = Relationship(
        back_populates="team_role_assignments"
    )

    @classmethod
    def from_request(
        cls, role_assignment: TeamRoleAssignmentRequestModel
    ) -> "TeamRoleAssignmentSchema":
        """Create a `TeamRoleAssignmentSchema` from a `TeamRoleAssignmentRequestModel`.

        Args:
            role_assignment: The `TeamRoleAssignmentRequestModel` from which
                to create the schema.

        Returns:
            The created `TeamRoleAssignmentSchema`.
        """
        return cls(
            role_id=role_assignment.role,
            team_id=role_assignment.team,
            workspace_id=role_assignment.workspace,
        )

    def to_model(self) -> TeamRoleAssignmentResponseModel:
        """Convert a `TeamRoleAssignmentSchema` to a `TeamRoleAssignmentResponseModel`.

        Returns:
            The converted `TeamRoleAssignmentResponseModel`.
        """
        return TeamRoleAssignmentResponseModel(
            id=self.id,
            workspace=self.workspace.to_model() if self.workspace else None,
            # `_block_recursion=True` presumably stops a cyclic
            # team <-> role conversion — confirm in `TeamSchema.to_model`.
            team=self.team.to_model(_block_recursion=True),
            role=self.role.to_model(),
            created=self.created,
            updated=self.updated,
        )
from_request(role_assignment) classmethod

Create a TeamRoleAssignmentSchema from a RoleAssignmentRequestModel.

Parameters:

Name Type Description Default
role_assignment TeamRoleAssignmentRequestModel

The RoleAssignmentRequestModel from which to create the schema.

required

Returns:

Type Description
TeamRoleAssignmentSchema

The created TeamRoleAssignmentSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(
    cls, role_assignment: TeamRoleAssignmentRequestModel
) -> "TeamRoleAssignmentSchema":
    """Create a `TeamRoleAssignmentSchema` from a `TeamRoleAssignmentRequestModel`.

    Args:
        role_assignment: The request model from which to create the
            schema.

    Returns:
        The created `TeamRoleAssignmentSchema`.
    """
    column_values = {
        "role_id": role_assignment.role,
        "team_id": role_assignment.team,
        "workspace_id": role_assignment.workspace,
    }
    return cls(**column_values)
to_model(self)

Convert a TeamRoleAssignmentSchema to a RoleAssignmentModel.

Returns:

Type Description
TeamRoleAssignmentResponseModel

The converted RoleAssignmentModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> TeamRoleAssignmentResponseModel:
    """Convert a `TeamRoleAssignmentSchema` to a `TeamRoleAssignmentResponseModel`.

    Returns:
        The converted response model.
    """
    workspace_model = self.workspace.to_model() if self.workspace else None
    # `_block_recursion=True` presumably stops a cyclic team <-> role
    # conversion — confirm in `TeamSchema.to_model`.
    team_model = self.team.to_model(_block_recursion=True)
    return TeamRoleAssignmentResponseModel(
        id=self.id,
        workspace=workspace_model,
        team=team_model,
        role=self.role.to_model(),
        created=self.created,
        updated=self.updated,
    )
UserRoleAssignmentSchema (BaseSchema) pydantic-model

SQL Model for assigning roles to users for a given workspace.

Source code in zenml/zen_stores/schemas/role_schemas.py
class UserRoleAssignmentSchema(BaseSchema, table=True):
    """SQL Model for assigning roles to users for a given workspace."""

    __tablename__ = "user_role_assignment"

    id: UUID = Field(primary_key=True, default_factory=uuid4)
    # The assigned role; the assignment row is deleted with the role.
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # The user receiving the role; deleted together with the user.
    user_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Optional workspace scope; a NULL workspace presumably means an
    # unscoped (global) assignment — confirm against the store logic.
    workspace_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )

    role: RoleSchema = Relationship(back_populates="user_role_assignments")
    user: Optional["UserSchema"] = Relationship(
        back_populates="assigned_roles"
    )
    workspace: Optional["WorkspaceSchema"] = Relationship(
        back_populates="user_role_assignments"
    )

    @classmethod
    def from_request(
        cls, role_assignment: UserRoleAssignmentRequestModel
    ) -> "UserRoleAssignmentSchema":
        """Create a `UserRoleAssignmentSchema` from a `UserRoleAssignmentRequestModel`.

        Args:
            role_assignment: The `UserRoleAssignmentRequestModel` from which
                to create the schema.

        Returns:
            The created `UserRoleAssignmentSchema`.
        """
        return cls(
            role_id=role_assignment.role,
            user_id=role_assignment.user,
            workspace_id=role_assignment.workspace,
        )

    def to_model(self) -> UserRoleAssignmentResponseModel:
        """Convert a `UserRoleAssignmentSchema` to a `UserRoleAssignmentResponseModel`.

        Returns:
            The converted `UserRoleAssignmentResponseModel`.
        """
        return UserRoleAssignmentResponseModel(
            id=self.id,
            workspace=self.workspace.to_model() if self.workspace else None,
            # `_block_recursion=True` presumably stops a cyclic
            # user <-> role conversion — confirm in `UserSchema.to_model`.
            user=self.user.to_model(_block_recursion=True)
            if self.user
            else None,
            role=self.role.to_model(),
            created=self.created,
            updated=self.updated,
        )
from_request(role_assignment) classmethod

Create a UserRoleAssignmentSchema from a RoleAssignmentRequestModel.

Parameters:

Name Type Description Default
role_assignment UserRoleAssignmentRequestModel

The RoleAssignmentRequestModel from which to create the schema.

required

Returns:

Type Description
UserRoleAssignmentSchema

The created UserRoleAssignmentSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(
    cls, role_assignment: UserRoleAssignmentRequestModel
) -> "UserRoleAssignmentSchema":
    """Create a `UserRoleAssignmentSchema` from a `UserRoleAssignmentRequestModel`.

    Args:
        role_assignment: The request model from which to create the
            schema.

    Returns:
        The created `UserRoleAssignmentSchema`.
    """
    column_values = {
        "role_id": role_assignment.role,
        "user_id": role_assignment.user,
        "workspace_id": role_assignment.workspace,
    }
    return cls(**column_values)
to_model(self)

Convert a UserRoleAssignmentSchema to a RoleAssignmentModel.

Returns:

Type Description
UserRoleAssignmentResponseModel

The converted RoleAssignmentModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> UserRoleAssignmentResponseModel:
    """Convert a `UserRoleAssignmentSchema` to a `UserRoleAssignmentResponseModel`.

    Returns:
        The converted response model.
    """
    workspace_model = self.workspace.to_model() if self.workspace else None
    user_model = None
    if self.user:
        # `_block_recursion=True` presumably stops a cyclic user <-> role
        # conversion — confirm in `UserSchema.to_model`.
        user_model = self.user.to_model(_block_recursion=True)
    return UserRoleAssignmentResponseModel(
        id=self.id,
        workspace=workspace_model,
        user=user_model,
        role=self.role.to_model(),
        created=self.created,
        updated=self.updated,
    )

run_metadata_schemas

SQLModel implementation of pipeline run metadata tables.

RunMetadataSchema (BaseSchema) pydantic-model

SQL Model for run metadata.

Source code in zenml/zen_stores/schemas/run_metadata_schemas.py
class RunMetadataSchema(BaseSchema, table=True):
    """SQL Model for run metadata.

    A metadata entry can be attached to a pipeline run, a step run, an
    artifact or a stack component — all four foreign keys are optional.
    """

    __tablename__ = "run_metadata"

    # Pipeline run the metadata belongs to; row deleted with the run.
    pipeline_run_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineRunSchema.__tablename__,
        source_column="pipeline_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    pipeline_run: Optional["PipelineRunSchema"] = Relationship(
        back_populates="run_metadata"
    )

    # Step run the metadata belongs to; row deleted with the step run.
    step_run_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="step_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    step_run: Optional["StepRunSchema"] = Relationship(
        back_populates="run_metadata"
    )

    # Artifact the metadata belongs to; row deleted with the artifact.
    artifact_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ArtifactSchema.__tablename__,
        source_column="artifact_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    artifact: Optional["ArtifactSchema"] = Relationship(
        back_populates="run_metadata"
    )

    # Producing stack component; kept (SET NULL) if the component goes away.
    stack_component_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackComponentSchema.__tablename__,
        source_column="stack_component_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    stack_component: Optional["StackComponentSchema"] = Relationship(
        back_populates="run_metadata"
    )

    # Creating user; kept (SET NULL) if the user is deleted.
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="run_metadata")

    # Owning workspace; metadata is deleted together with the workspace.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="run_metadata")

    # Metadata key.
    key: str
    # Metadata value, stored as its JSON serialization in a TEXT column.
    value: str = Field(sa_column=Column(TEXT, nullable=False))
    type: MetadataTypeEnum

    def to_model(self) -> "RunMetadataResponseModel":
        """Convert a `RunMetadataSchema` to a `RunMetadataResponseModel`.

        Returns:
            The created `RunMetadataResponseModel`, with the stored JSON
            text deserialized back into the original value.
        """
        return RunMetadataResponseModel(
            id=self.id,
            pipeline_run_id=self.pipeline_run_id,
            step_run_id=self.step_run_id,
            artifact_id=self.artifact_id,
            stack_component_id=self.stack_component_id,
            key=self.key,
            value=json.loads(self.value),
            type=self.type,
            workspace=self.workspace.to_model(),
            user=self.user.to_model() if self.user else None,
            created=self.created,
            updated=self.updated,
        )

    @classmethod
    def from_request(
        cls, request: "RunMetadataRequestModel"
    ) -> "RunMetadataSchema":
        """Create a `RunMetadataSchema` from a `RunMetadataRequestModel`.

        Args:
            request: The request model to create the schema from.

        Returns:
            The created `RunMetadataSchema`, with the value serialized
            to JSON for storage.
        """
        return cls(
            workspace_id=request.workspace,
            user_id=request.user,
            pipeline_run_id=request.pipeline_run_id,
            step_run_id=request.step_run_id,
            artifact_id=request.artifact_id,
            stack_component_id=request.stack_component_id,
            key=request.key,
            value=json.dumps(request.value),
            type=request.type,
        )
from_request(request) classmethod

Create a RunMetadataSchema from a RunMetadataRequestModel.

Parameters:

Name Type Description Default
request RunMetadataRequestModel

The request model to create the schema from.

required

Returns:

Type Description
RunMetadataSchema

The created RunMetadataSchema.

Source code in zenml/zen_stores/schemas/run_metadata_schemas.py
@classmethod
def from_request(
    cls, request: "RunMetadataRequestModel"
) -> "RunMetadataSchema":
    """Create a `RunMetadataSchema` from a `RunMetadataRequestModel`.

    Args:
        request: The request model to create the schema from.

    Returns:
        The created `RunMetadataSchema`.
    """
    # The metadata value is persisted as its JSON serialization.
    serialized_value = json.dumps(request.value)
    return cls(
        workspace_id=request.workspace,
        user_id=request.user,
        pipeline_run_id=request.pipeline_run_id,
        step_run_id=request.step_run_id,
        artifact_id=request.artifact_id,
        stack_component_id=request.stack_component_id,
        key=request.key,
        value=serialized_value,
        type=request.type,
    )
to_model(self)

Convert a RunMetadataSchema to a RunMetadataResponseModel.

Returns:

Type Description
RunMetadataResponseModel

The created RunMetadataResponseModel.

Source code in zenml/zen_stores/schemas/run_metadata_schemas.py
def to_model(self) -> "RunMetadataResponseModel":
    """Convert a `RunMetadataSchema` to a `RunMetadataResponseModel`.

    Returns:
        The created `RunMetadataResponseModel`.
    """
    # The stored TEXT column holds JSON; deserialize it back.
    deserialized_value = json.loads(self.value)
    user_model = self.user.to_model() if self.user else None
    return RunMetadataResponseModel(
        id=self.id,
        pipeline_run_id=self.pipeline_run_id,
        step_run_id=self.step_run_id,
        artifact_id=self.artifact_id,
        stack_component_id=self.stack_component_id,
        key=self.key,
        value=deserialized_value,
        type=self.type,
        workspace=self.workspace.to_model(),
        user=user_model,
        created=self.created,
        updated=self.updated,
    )

schedule_schema

SQL Model Implementations for Pipeline Schedules.

ScheduleSchema (NamedSchema) pydantic-model

SQL Model for schedules.

Source code in zenml/zen_stores/schemas/schedule_schema.py
class ScheduleSchema(NamedSchema, table=True):
    """SQL Model for schedules."""

    __tablename__ = "schedule"

    # Owning workspace; schedules are deleted together with it.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="schedules")

    # Creating user; kept (SET NULL) if the user is deleted.
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    # NOTE(review): user_id is nullable but this relationship is typed
    # non-Optional and to_model() calls self.user.to_model()
    # unconditionally — verify a schedule can never lose its user.
    user: "UserSchema" = Relationship(back_populates="schedules")

    # Scheduled pipeline; schedule is deleted with the pipeline.
    pipeline_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineSchema.__tablename__,
        source_column="pipeline_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    pipeline: "PipelineSchema" = Relationship(back_populates="schedules")
    deployment: Optional["PipelineDeploymentSchema"] = Relationship(
        back_populates="schedule"
    )

    # Orchestrator that executes the schedule; kept (SET NULL) on deletion.
    orchestrator_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackComponentSchema.__tablename__,
        source_column="orchestrator_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    orchestrator: "StackComponentSchema" = Relationship(
        back_populates="schedules"
    )

    # Scheduling parameters; the interval is stored as seconds (float)
    # and converted from/to a timedelta at the model boundary.
    active: bool
    cron_expression: Optional[str] = Field(nullable=True)
    start_time: Optional[datetime] = Field(nullable=True)
    end_time: Optional[datetime] = Field(nullable=True)
    interval_second: Optional[float] = Field(nullable=True)
    catchup: bool

    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="schedule",
    )

    @classmethod
    def from_create_model(
        cls, model: ScheduleRequestModel
    ) -> "ScheduleSchema":
        """Create a `ScheduleSchema` from a `ScheduleRequestModel`.

        Args:
            model: The `ScheduleRequestModel` to create the schema from.

        Returns:
            The created `ScheduleSchema`.
        """
        # Convert the timedelta interval into plain seconds for storage.
        if model.interval_second is not None:
            interval_second = model.interval_second.total_seconds()
        else:
            interval_second = None
        return cls(
            name=model.name,
            workspace_id=model.workspace,
            user_id=model.user,
            pipeline_id=model.pipeline_id,
            orchestrator_id=model.orchestrator_id,
            active=model.active,
            cron_expression=model.cron_expression,
            start_time=model.start_time,
            end_time=model.end_time,
            interval_second=interval_second,
            catchup=model.catchup,
        )

    def from_update_model(
        self, model: ScheduleUpdateModel
    ) -> "ScheduleSchema":
        """Update a `ScheduleSchema` from a `ScheduleUpdateModel`.

        Only fields that are set (non-None) on the update model are
        applied; all other fields keep their current values.

        Args:
            model: The `ScheduleUpdateModel` to update the schema from.

        Returns:
            The updated `ScheduleSchema`.
        """
        if model.name is not None:
            self.name = model.name
        if model.active is not None:
            self.active = model.active
        if model.cron_expression is not None:
            self.cron_expression = model.cron_expression
        if model.start_time is not None:
            self.start_time = model.start_time
        if model.end_time is not None:
            self.end_time = model.end_time
        if model.interval_second is not None:
            self.interval_second = model.interval_second.total_seconds()
        if model.catchup is not None:
            self.catchup = model.catchup
        self.updated = datetime.utcnow()
        return self

    def to_model(self) -> ScheduleResponseModel:
        """Convert a `ScheduleSchema` to a `ScheduleResponseModel`.

        Returns:
            The created `ScheduleResponseModel`.
        """
        # Restore the stored seconds back into a timedelta.
        if self.interval_second is not None:
            interval_second = timedelta(seconds=self.interval_second)
        else:
            interval_second = None
        return ScheduleResponseModel(
            id=self.id,
            name=self.name,
            workspace=self.workspace.to_model(),
            user=self.user.to_model(),
            pipeline_id=self.pipeline_id,
            orchestrator_id=self.orchestrator_id,
            active=self.active,
            cron_expression=self.cron_expression,
            start_time=self.start_time,
            end_time=self.end_time,
            interval_second=interval_second,
            catchup=self.catchup,
            created=self.created,
            updated=self.updated,
        )
from_create_model(model) classmethod

Create a ScheduleSchema from a ScheduleRequestModel.

Parameters:

Name Type Description Default
model ScheduleRequestModel

The ScheduleRequestModel to create the schema from.

required

Returns:

Type Description
ScheduleSchema

The created ScheduleSchema.

Source code in zenml/zen_stores/schemas/schedule_schema.py
@classmethod
def from_create_model(
    cls, model: ScheduleRequestModel
) -> "ScheduleSchema":
    """Create a `ScheduleSchema` from a `ScheduleRequestModel`.

    Args:
        model: The `ScheduleRequestModel` to create the schema from.

    Returns:
        The created `ScheduleSchema`.
    """
    # A timedelta interval is stored as a plain number of seconds.
    interval = model.interval_second
    interval_second = (
        interval.total_seconds() if interval is not None else None
    )
    return cls(
        name=model.name,
        workspace_id=model.workspace,
        user_id=model.user,
        pipeline_id=model.pipeline_id,
        orchestrator_id=model.orchestrator_id,
        active=model.active,
        cron_expression=model.cron_expression,
        start_time=model.start_time,
        end_time=model.end_time,
        interval_second=interval_second,
        catchup=model.catchup,
    )
from_update_model(self, model)

Update a ScheduleSchema from a ScheduleUpdateModel.

Parameters:

Name Type Description Default
model ScheduleUpdateModel

The ScheduleUpdateModel to update the schema from.

required

Returns:

Type Description
ScheduleSchema

The updated ScheduleSchema.

Source code in zenml/zen_stores/schemas/schedule_schema.py
def from_update_model(
    self, model: ScheduleUpdateModel
) -> "ScheduleSchema":
    """Update a `ScheduleSchema` from a `ScheduleUpdateModel`.

    Args:
        model: The `ScheduleUpdateModel` to update the schema from. Only
            fields set to a non-None value are applied.

    Returns:
        The updated `ScheduleSchema`.
    """
    # Plain fields are copied verbatim when set on the update model.
    for attr in (
        "name",
        "active",
        "cron_expression",
        "start_time",
        "end_time",
        "catchup",
    ):
        new_value = getattr(model, attr)
        if new_value is not None:
            setattr(self, attr, new_value)

    # The interval arrives as a timedelta but is stored as seconds.
    if model.interval_second is not None:
        self.interval_second = model.interval_second.total_seconds()

    self.updated = datetime.utcnow()
    return self
to_model(self)

Convert a ScheduleSchema to a ScheduleResponseModel.

Returns:

Type Description
ScheduleResponseModel

The created ScheduleResponseModel.

Source code in zenml/zen_stores/schemas/schedule_schema.py
def to_model(self) -> ScheduleResponseModel:
    """Convert a `ScheduleSchema` to a `ScheduleResponseModel`.

    Returns:
        The created `ScheduleResponseModel`.
    """
    # The interval is stored as seconds; restore it as a timedelta.
    interval = (
        None
        if self.interval_second is None
        else timedelta(seconds=self.interval_second)
    )
    return ScheduleResponseModel(
        id=self.id,
        name=self.name,
        workspace=self.workspace.to_model(),
        user=self.user.to_model(),
        pipeline_id=self.pipeline_id,
        orchestrator_id=self.orchestrator_id,
        active=self.active,
        cron_expression=self.cron_expression,
        start_time=self.start_time,
        end_time=self.end_time,
        interval_second=interval,
        catchup=self.catchup,
        created=self.created,
        updated=self.updated,
    )

schema_utils

Utility functions for SQLModel schemas.

build_foreign_key_field(source, target, source_column, target_column, ondelete, nullable, **sa_column_kwargs)

Build a SQLModel foreign key field.

Parameters:

Name Type Description Default
source str

Source table name.

required
target str

Target table name.

required
source_column str

Source column name.

required
target_column str

Target column name.

required
ondelete str

On delete behavior.

required
nullable bool

Whether the field is nullable.

required
**sa_column_kwargs Any

Keyword arguments for the SQLAlchemy column.

{}

Returns:

Type Description
Any

SQLModel foreign key field.

Exceptions:

Type Description
ValueError

If the ondelete and nullable arguments are not compatible.

Source code in zenml/zen_stores/schemas/schema_utils.py
def build_foreign_key_field(
    source: str,
    target: str,
    source_column: str,
    target_column: str,
    ondelete: str,
    nullable: bool,
    **sa_column_kwargs: Any,
) -> Any:
    """Build a SQLModel foreign key field.

    Args:
        source: Source table name.
        target: Target table name.
        source_column: Source column name.
        target_column: Target column name.
        ondelete: On delete behavior.
        nullable: Whether the field is nullable.
        **sa_column_kwargs: Keyword arguments for the SQLAlchemy column.

    Returns:
        SQLModel foreign key field.

    Raises:
        ValueError: If the ondelete and nullable arguments are not compatible.
    """
    # "SET NULL" only makes sense on a column that can actually hold NULL.
    if ondelete == "SET NULL" and not nullable:
        raise ValueError(
            "Cannot set ondelete to SET NULL if the field is not nullable."
        )

    foreign_key = ForeignKey(
        f"{target}.{target_column}",
        # Name the constraint explicitly so migrations can drop it by name.
        name=foreign_key_constraint_name(
            source=source,
            target=target,
            source_column=source_column,
        ),
        ondelete=ondelete,
    )
    return Field(
        sa_column=Column(foreign_key, nullable=nullable, **sa_column_kwargs),
    )
foreign_key_constraint_name(source, target, source_column)

Defines the name of a foreign key constraint.

For simplicity, we use the naming convention used by alembic here: https://alembic.sqlalchemy.org/en/latest/batch.html#dropping-unnamed-or-named-foreign-key-constraints.

Parameters:

Name Type Description Default
source str

Source table name.

required
target str

Target table name.

required
source_column str

Source column name.

required

Returns:

Type Description
str

Name of the foreign key constraint.

Source code in zenml/zen_stores/schemas/schema_utils.py
def foreign_key_constraint_name(
    source: str, target: str, source_column: str
) -> str:
    """Defines the name of a foreign key constraint.

    For simplicity, we use the naming convention used by alembic here:
    https://alembic.sqlalchemy.org/en/latest/batch.html#dropping-unnamed-or-named-foreign-key-constraints.

    Args:
        source: Source table name.
        target: Target table name.
        source_column: Source column name.

    Returns:
        Name of the foreign key constraint.
    """
    return "_".join(("fk", source, source_column, target))

secret_schemas

SQL Model Implementations for Secrets.

SecretSchema (NamedSchema) pydantic-model

SQL Model for secrets.

Attributes:

Name Type Description
name str

The name of the secret.

values bytes

The values of the secret.

Source code in zenml/zen_stores/schemas/secret_schemas.py
class SecretSchema(NamedSchema, table=True):
    """SQL Model for secrets.

    Attributes:
        name: The name of the secret.
        values: The values of the secret.
    """

    __tablename__ = "secret"

    # Scope in which the secret is visible (e.g. workspace or user scope).
    scope: SecretScope

    # JSON-serialized key/value pairs, either base64 encoded or encrypted
    # (see `_dump_secret_values`), stored in a TEXT column.
    values: bytes = Field(sa_column=Column(TEXT, nullable=False))

    # Owning workspace; deleting the workspace cascades to its secrets.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="secrets")

    # Owning user; deleting the user cascades to their secrets.
    user_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    user: "UserSchema" = Relationship(back_populates="secrets")

    @classmethod
    def _dump_secret_values(
        cls, values: Dict[str, str], encryption_engine: Optional[AesGcmEngine]
    ) -> bytes:
        """Dump the secret values to a string.

        Args:
            values: The secret values to dump.
            encryption_engine: The encryption engine to use to encrypt the
                secret values. If None, the values will be base64 encoded.

        Raises:
            ValueError: If the secret values do not fit in the database field.

        Returns:
            The serialized encrypted secret values.
        """
        serialized_values = json.dumps(values)

        if encryption_engine is None:
            # No encryption configured: fall back to plain base64 encoding
            # so the payload is still safe to store in a TEXT column.
            encrypted_values = base64.b64encode(
                serialized_values.encode("utf-8")
            )
        else:
            encrypted_values = encryption_engine.encrypt(serialized_values)

        # Guard against silently truncated rows: the encoded payload must
        # fit into the column backing the `values` field.
        if len(encrypted_values) > TEXT_FIELD_MAX_LENGTH:
            raise ValueError(
                "Database representation of secret values exceeds max "
                "length. Please use fewer values or consider using shorter "
                "secret keys and/or values."
            )

        return encrypted_values

    @classmethod
    def _load_secret_values(
        cls,
        encrypted_values: bytes,
        encryption_engine: Optional[AesGcmEngine] = None,
    ) -> Dict[str, str]:
        """Load the secret values from a base64 encoded byte string.

        Args:
            encrypted_values: The serialized encrypted secret values.
            encryption_engine: The encryption engine to use to decrypt the
                secret values. If None, the values will be base64 decoded.

        Returns:
            The loaded secret values.
        """
        if encryption_engine is None:
            # Values were stored base64 encoded (see `_dump_secret_values`).
            serialized_values = base64.b64decode(encrypted_values).decode()
        else:
            serialized_values = encryption_engine.decrypt(encrypted_values)

        return cast(
            Dict[str, str],
            json.loads(serialized_values),
        )

    @classmethod
    def from_request(
        cls,
        secret: SecretRequestModel,
        encryption_engine: Optional[AesGcmEngine] = None,
    ) -> "SecretSchema":
        """Create a `SecretSchema` from a `SecretRequestModel`.

        Args:
            secret: The `SecretRequestModel` from which to create the schema.
            encryption_engine: The encryption engine to use to encrypt the
                secret values. If None, the values will be base64 encoded.

        Returns:
            The created `SecretSchema`.
        """
        # A secret must always have an owning user at creation time.
        assert secret.user is not None, "User must be set for secret creation."
        return cls(
            name=secret.name,
            scope=secret.scope,
            workspace_id=secret.workspace,
            user_id=secret.user,
            # Serialize (and optionally encrypt) the plain-text values
            # before they are persisted.
            values=cls._dump_secret_values(
                secret.secret_values, encryption_engine
            ),
        )

    def update(
        self,
        secret_update: SecretUpdateModel,
        encryption_engine: Optional[AesGcmEngine] = None,
    ) -> "SecretSchema":
        """Update a `SecretSchema` from a `SecretUpdateModel`.

        The method also knows how to handle the `values` field of the secret
        update model: It will update the existing values with the new values
        and drop `None` values.

        Args:
            secret_update: The `SecretUpdateModel` from which to update the schema.
            encryption_engine: The encryption engine to use to encrypt the
                secret values. If None, the values will be base64 encoded.

        Returns:
            The updated `SecretSchema`.
        """
        # Workspace and user ownership are never changed by an update.
        for field, value in secret_update.dict(
            exclude_unset=True, exclude={"workspace", "user"}
        ).items():
            if field == "values":
                # Merge the updated values into the currently stored ones.
                existing_values = self._load_secret_values(
                    self.values, encryption_engine
                )
                # NOTE(review): `secret_values` presumably contains only the
                # non-None entries of the update, while `values` retains the
                # None markers — confirm against SecretUpdateModel.
                existing_values.update(secret_update.secret_values)
                # Drop values removed in the update
                for k, v in secret_update.values.items():
                    if v is None and k in existing_values:
                        del existing_values[k]
                self.values = self._dump_secret_values(
                    existing_values, encryption_engine
                )
            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(
        self,
        encryption_engine: Optional[AesGcmEngine] = None,
        include_values: bool = True,
    ) -> SecretResponseModel:
        """Converts a secret schema to a secret model.

        Args:
            encryption_engine: The encryption engine to use to decrypt the
                secret values. If None, the values will be base64 decoded.
            include_values: Whether to include the secret values in the
                response model or not.

        Returns:
            The secret model.
        """
        return SecretResponseModel(
            id=self.id,
            name=self.name,
            scope=self.scope,
            # Optionally omit the decrypted values from the response.
            values=self._load_secret_values(self.values, encryption_engine)
            if include_values
            else {},
            user=self.user.to_model() if self.user else None,
            workspace=self.workspace.to_model(),
            created=self.created,
            updated=self.updated,
        )
from_request(secret, encryption_engine=None) classmethod

Create a SecretSchema from a SecretRequestModel.

Parameters:

Name Type Description Default
secret SecretRequestModel

The SecretRequestModel from which to create the schema.

required
encryption_engine Optional[sqlalchemy_utils.types.encrypted.encrypted_type.AesGcmEngine]

The encryption engine to use to encrypt the secret values. If None, the values will be base64 encoded.

None

Returns:

Type Description
SecretSchema

The created SecretSchema.

Source code in zenml/zen_stores/schemas/secret_schemas.py
@classmethod
def from_request(
    cls,
    secret: SecretRequestModel,
    encryption_engine: Optional[AesGcmEngine] = None,
) -> "SecretSchema":
    """Create a `SecretSchema` from a `SecretRequestModel`.

    Args:
        secret: The `SecretRequestModel` from which to create the schema.
        encryption_engine: The encryption engine to use to encrypt the
            secret values. If None, the values will be base64 encoded.

    Returns:
        The created `SecretSchema`.
    """
    # A secret must always have an owning user at creation time.
    assert secret.user is not None, "User must be set for secret creation."
    return cls(
        name=secret.name,
        scope=secret.scope,
        workspace_id=secret.workspace,
        user_id=secret.user,
        # Serialize (and optionally encrypt) the plain-text values before
        # they are persisted.
        values=cls._dump_secret_values(
            secret.secret_values, encryption_engine
        ),
    )
to_model(self, encryption_engine=None, include_values=True)

Converts a secret schema to a secret model.

Parameters:

Name Type Description Default
encryption_engine Optional[sqlalchemy_utils.types.encrypted.encrypted_type.AesGcmEngine]

The encryption engine to use to decrypt the secret values. If None, the values will be base64 decoded.

None
include_values bool

Whether to include the secret values in the response model or not.

True

Returns:

Type Description
SecretResponseModel

The secret model.

Source code in zenml/zen_stores/schemas/secret_schemas.py
def to_model(
    self,
    encryption_engine: Optional[AesGcmEngine] = None,
    include_values: bool = True,
) -> SecretResponseModel:
    """Converts a secret schema to a secret model.

    Args:
        encryption_engine: The encryption engine to use to decrypt the
            secret values. If None, the values will be base64 decoded.
        include_values: Whether to include the secret values in the
            response model or not.

    Returns:
        The secret model.
    """
    return SecretResponseModel(
        id=self.id,
        name=self.name,
        scope=self.scope,
        # Optionally omit the decrypted values from the response.
        values=self._load_secret_values(self.values, encryption_engine)
        if include_values
        else {},
        user=self.user.to_model() if self.user else None,
        workspace=self.workspace.to_model(),
        created=self.created,
        updated=self.updated,
    )
update(self, secret_update, encryption_engine=None)

Update a SecretSchema from a SecretUpdateModel.

The method also knows how to handle the values field of the secret update model: It will update the existing values with the new values and drop None values.

Parameters:

Name Type Description Default
secret_update SecretUpdateModel

The SecretUpdateModel from which to update the schema.

required
encryption_engine Optional[sqlalchemy_utils.types.encrypted.encrypted_type.AesGcmEngine]

The encryption engine to use to encrypt the secret values. If None, the values will be base64 encoded.

None

Returns:

Type Description
SecretSchema

The updated SecretSchema.

Source code in zenml/zen_stores/schemas/secret_schemas.py
def update(
    self,
    secret_update: SecretUpdateModel,
    encryption_engine: Optional[AesGcmEngine] = None,
) -> "SecretSchema":
    """Update a `SecretSchema` from a `SecretUpdateModel`.

    The method also knows how to handle the `values` field of the secret
    update model: It will update the existing values with the new values
    and drop `None` values.

    Args:
        secret_update: The `SecretUpdateModel` from which to update the schema.
        encryption_engine: The encryption engine to use to encrypt the
            secret values. If None, the values will be base64 encoded.

    Returns:
        The updated `SecretSchema`.
    """
    # Workspace and user ownership are never changed by an update.
    for field, value in secret_update.dict(
        exclude_unset=True, exclude={"workspace", "user"}
    ).items():
        if field == "values":
            # Merge the updated values into the currently stored ones.
            existing_values = self._load_secret_values(
                self.values, encryption_engine
            )
            # NOTE(review): `secret_values` presumably contains only the
            # non-None entries of the update, while `values` retains the
            # None markers — confirm against SecretUpdateModel.
            existing_values.update(secret_update.secret_values)
            # Drop values removed in the update
            for k, v in secret_update.values.items():
                if v is None and k in existing_values:
                    del existing_values[k]
            self.values = self._dump_secret_values(
                existing_values, encryption_engine
            )
        else:
            setattr(self, field, value)

    self.updated = datetime.utcnow()
    return self

service_connector_schemas

SQL Model Implementations for Service Connectors.

ServiceConnectorSchema (ShareableSchema) pydantic-model

SQL Model for service connectors.

Source code in zenml/zen_stores/schemas/service_connector_schemas.py
class ServiceConnectorSchema(ShareableSchema, table=True):
    """SQL Model for service connectors."""

    __tablename__ = "service_connector"

    # Connector type identifier, stored in a TEXT column.
    connector_type: str = Field(sa_column=Column(TEXT))
    description: str
    # Authentication method identifier, stored in a TEXT column.
    auth_method: str = Field(sa_column=Column(TEXT))
    # Base64-encoded JSON list of resource types (see `resource_types_list`).
    resource_types: bytes
    # Optional fixed resource this connector is bound to.
    resource_id: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    supports_instances: bool
    # Base64-encoded JSON dict of connector configuration, or None.
    configuration: Optional[bytes]
    # ID of the secret holding sensitive connector credentials, if any.
    secret_id: Optional[UUID]
    expires_at: Optional[datetime]
    expiration_seconds: Optional[int]
    # Base64-encoded JSON dict of labels, or None (see `labels_dict`).
    labels: Optional[bytes]

    # Owning workspace; deleting the workspace cascades to its connectors.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(
        back_populates="service_connectors"
    )

    # Owning user; the connector survives user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(
        back_populates="service_connectors"
    )
    components: List["StackComponentSchema"] = Relationship(
        back_populates="connector",
    )

    @property
    def resource_types_list(self) -> List[str]:
        """Returns the resource types as a list.

        Returns:
            The resource types as a list.
        """
        # Stored as a base64-encoded JSON list.
        resource_types = json.loads(
            base64.b64decode(self.resource_types).decode()
        )
        assert isinstance(resource_types, list)
        return resource_types

    @property
    def labels_dict(self) -> Dict[str, str]:
        """Returns the labels as a dictionary.

        Returns:
            The labels as a dictionary.
        """
        if self.labels is None:
            return {}
        # Stored as a base64-encoded JSON dict.
        labels_dict = json.loads(base64.b64decode(self.labels).decode())
        return cast(Dict[str, str], labels_dict)

    def has_labels(self, labels: Dict[str, Optional[str]]) -> bool:
        """Checks if the connector has the given labels.

        Args:
            labels: The labels to check for.

        Returns:
            Whether the connector has the given labels.
        """
        # A non-None value must match exactly; a None value only requires
        # the label key to be present.
        return all(
            self.labels_dict.get(key, None) == value
            for key, value in labels.items()
            if value is not None
        ) and all(
            key in self.labels_dict
            for key, value in labels.items()
            if value is None
        )

    @classmethod
    def from_request(
        cls,
        connector_request: ServiceConnectorRequestModel,
        secret_id: Optional[UUID] = None,
    ) -> "ServiceConnectorSchema":
        """Create a `ServiceConnectorSchema` from a `ServiceConnectorRequestModel`.

        Args:
            connector_request: The `ServiceConnectorRequestModel` from which to
                create the schema.
            secret_id: The ID of the secret to use for this connector.

        Returns:
            The created `ServiceConnectorSchema`.
        """
        assert connector_request.user is not None, "User must be set."
        return cls(
            workspace_id=connector_request.workspace,
            user_id=connector_request.user,
            is_shared=connector_request.is_shared,
            name=connector_request.name,
            description=connector_request.description,
            connector_type=connector_request.type,
            auth_method=connector_request.auth_method,
            # JSON-serialize and base64 encode the structured fields for
            # storage (resource types, configuration, labels).
            resource_types=base64.b64encode(
                json.dumps(connector_request.resource_types).encode("utf-8")
            ),
            resource_id=connector_request.resource_id,
            supports_instances=connector_request.supports_instances,
            configuration=base64.b64encode(
                json.dumps(connector_request.configuration).encode("utf-8")
            )
            if connector_request.configuration
            else None,
            secret_id=secret_id,
            expires_at=connector_request.expires_at,
            expiration_seconds=connector_request.expiration_seconds,
            labels=base64.b64encode(
                json.dumps(connector_request.labels).encode("utf-8")
            )
            if connector_request.labels
            else None,
        )

    def update(
        self,
        connector_update: ServiceConnectorUpdateModel,
        secret_id: Optional[UUID] = None,
    ) -> "ServiceConnectorSchema":
        """Updates a `ServiceConnectorSchema` from a `ServiceConnectorUpdateModel`.

        Args:
            connector_update: The `ServiceConnectorUpdateModel` to update from.
            secret_id: The ID of the secret to use for this connector.

        Returns:
            The updated `ServiceConnectorSchema`.
        """
        # exclude_unset=False: every model field is visited; None values are
        # skipped below except for the two special cases that clear state.
        for field, value in connector_update.dict(
            exclude_unset=False,
            exclude={"workspace", "user", "secrets"},
        ).items():
            if value is None:
                if field == "resource_id":
                    # The resource ID field in the update is special: if set
                    # to None in the update, it triggers the existing resource
                    # ID to be cleared.
                    self.resource_id = None
                if field == "expiration_seconds":
                    # The expiration_seconds field in the update is special:
                    # if set to None in the update, it triggers the existing
                    # expiration_seconds to be cleared.
                    self.expiration_seconds = None
                continue
            if field == "configuration":
                self.configuration = (
                    base64.b64encode(
                        json.dumps(connector_update.configuration).encode(
                            "utf-8"
                        )
                    )
                    if connector_update.configuration
                    else None
                )
            elif field == "resource_types":
                self.resource_types = base64.b64encode(
                    json.dumps(connector_update.resource_types).encode("utf-8")
                )
            elif field == "labels":
                self.labels = (
                    base64.b64encode(
                        json.dumps(connector_update.labels).encode("utf-8")
                    )
                    if connector_update.labels
                    else None
                )
            else:
                setattr(self, field, value)
        # The secret ID is always replaced, even when None.
        self.secret_id = secret_id
        self.updated = datetime.utcnow()
        return self

    def to_model(
        self,
    ) -> "ServiceConnectorResponseModel":
        """Creates a `ServiceConnectorModel` from an instance of a `ServiceConnectorSchema`.

        Returns:
            A `ServiceConnectorModel`
        """
        return ServiceConnectorResponseModel(
            id=self.id,
            name=self.name,
            description=self.description,
            user=self.user.to_model(True) if self.user else None,
            workspace=self.workspace.to_model(),
            is_shared=self.is_shared,
            created=self.created,
            updated=self.updated,
            connector_type=self.connector_type,
            auth_method=self.auth_method,
            resource_types=self.resource_types_list,
            resource_id=self.resource_id,
            supports_instances=self.supports_instances,
            # Decode the stored base64/JSON configuration back to a dict.
            configuration=json.loads(
                base64.b64decode(self.configuration).decode()
            )
            if self.configuration
            else {},
            secret_id=self.secret_id,
            expires_at=self.expires_at,
            expiration_seconds=self.expiration_seconds,
            labels=self.labels_dict,
        )
labels_dict: Dict[str, str] property readonly

Returns the labels as a dictionary.

Returns:

Type Description
Dict[str, str]

The labels as a dictionary.

resource_types_list: List[str] property readonly

Returns the resource types as a list.

Returns:

Type Description
List[str]

The resource types as a list.

from_request(connector_request, secret_id=None) classmethod

Create a ServiceConnectorSchema from a ServiceConnectorRequestModel.

Parameters:

Name Type Description Default
connector_request ServiceConnectorRequestModel

The ServiceConnectorRequestModel from which to create the schema.

required
secret_id Optional[uuid.UUID]

The ID of the secret to use for this connector.

None

Returns:

Type Description
ServiceConnectorSchema

The created ServiceConnectorSchema.

Source code in zenml/zen_stores/schemas/service_connector_schemas.py
@classmethod
def from_request(
    cls,
    connector_request: ServiceConnectorRequestModel,
    secret_id: Optional[UUID] = None,
) -> "ServiceConnectorSchema":
    """Create a `ServiceConnectorSchema` from a `ServiceConnectorRequestModel`.

    Args:
        connector_request: The `ServiceConnectorRequestModel` from which to
            create the schema.
        secret_id: The ID of the secret to use for this connector.

    Returns:
        The created `ServiceConnectorSchema`.
    """
    assert connector_request.user is not None, "User must be set."
    return cls(
        workspace_id=connector_request.workspace,
        user_id=connector_request.user,
        is_shared=connector_request.is_shared,
        name=connector_request.name,
        description=connector_request.description,
        connector_type=connector_request.type,
        auth_method=connector_request.auth_method,
        # JSON-serialize and base64 encode the structured fields for
        # storage (resource types, configuration, labels).
        resource_types=base64.b64encode(
            json.dumps(connector_request.resource_types).encode("utf-8")
        ),
        resource_id=connector_request.resource_id,
        supports_instances=connector_request.supports_instances,
        configuration=base64.b64encode(
            json.dumps(connector_request.configuration).encode("utf-8")
        )
        if connector_request.configuration
        else None,
        secret_id=secret_id,
        expires_at=connector_request.expires_at,
        expiration_seconds=connector_request.expiration_seconds,
        labels=base64.b64encode(
            json.dumps(connector_request.labels).encode("utf-8")
        )
        if connector_request.labels
        else None,
    )
has_labels(self, labels)

Checks if the connector has the given labels.

Parameters:

Name Type Description Default
labels Dict[str, Optional[str]]

The labels to check for.

required

Returns:

Type Description
bool

Whether the connector has the given labels.

Source code in zenml/zen_stores/schemas/service_connector_schemas.py
def has_labels(self, labels: Dict[str, Optional[str]]) -> bool:
    """Checks if the connector has the given labels.

    A label with a non-None value must be present with exactly that value;
    a label with a None value only needs the key to exist.

    Args:
        labels: The labels to check for.

    Returns:
        Whether the connector has the given labels.
    """
    current = self.labels_dict
    for key, expected in labels.items():
        if expected is None:
            if key not in current:
                return False
        elif current.get(key, None) != expected:
            return False
    return True
to_model(self)

Creates a ServiceConnectorModel from an instance of a ServiceConnectorSchema.

Returns:

Type Description
ServiceConnectorResponseModel

A ServiceConnectorModel

Source code in zenml/zen_stores/schemas/service_connector_schemas.py
def to_model(
    self,
) -> "ServiceConnectorResponseModel":
    """Creates a `ServiceConnectorModel` from an instance of a `ServiceConnectorSchema`.

    Returns:
        A `ServiceConnectorModel`
    """
    return ServiceConnectorResponseModel(
        id=self.id,
        name=self.name,
        description=self.description,
        user=self.user.to_model(True) if self.user else None,
        workspace=self.workspace.to_model(),
        is_shared=self.is_shared,
        created=self.created,
        updated=self.updated,
        connector_type=self.connector_type,
        auth_method=self.auth_method,
        resource_types=self.resource_types_list,
        resource_id=self.resource_id,
        supports_instances=self.supports_instances,
        # Decode the stored base64/JSON configuration back to a dict.
        configuration=json.loads(
            base64.b64decode(self.configuration).decode()
        )
        if self.configuration
        else {},
        secret_id=self.secret_id,
        expires_at=self.expires_at,
        expiration_seconds=self.expiration_seconds,
        labels=self.labels_dict,
    )
update(self, connector_update, secret_id=None)

Updates a ServiceConnectorSchema from a ServiceConnectorUpdateModel.

Parameters:

Name Type Description Default
connector_update ServiceConnectorUpdateModel

The ServiceConnectorUpdateModel to update from.

required
secret_id Optional[uuid.UUID]

The ID of the secret to use for this connector.

None

Returns:

Type Description
ServiceConnectorSchema

The updated ServiceConnectorSchema.

Source code in zenml/zen_stores/schemas/service_connector_schemas.py
def update(
    self,
    connector_update: ServiceConnectorUpdateModel,
    secret_id: Optional[UUID] = None,
) -> "ServiceConnectorSchema":
    """Updates a `ServiceConnectorSchema` from a `ServiceConnectorUpdateModel`.

    Args:
        connector_update: The `ServiceConnectorUpdateModel` to update from.
        secret_id: The ID of the secret to use for this connector.

    Returns:
        The updated `ServiceConnectorSchema`.
    """
    # exclude_unset=False: every model field is visited; None values are
    # skipped below except for the two special cases that clear state.
    for field, value in connector_update.dict(
        exclude_unset=False,
        exclude={"workspace", "user", "secrets"},
    ).items():
        if value is None:
            if field == "resource_id":
                # The resource ID field in the update is special: if set
                # to None in the update, it triggers the existing resource
                # ID to be cleared.
                self.resource_id = None
            if field == "expiration_seconds":
                # The expiration_seconds field in the update is special:
                # if set to None in the update, it triggers the existing
                # expiration_seconds to be cleared.
                self.expiration_seconds = None
            continue
        if field == "configuration":
            self.configuration = (
                base64.b64encode(
                    json.dumps(connector_update.configuration).encode(
                        "utf-8"
                    )
                )
                if connector_update.configuration
                else None
            )
        elif field == "resource_types":
            self.resource_types = base64.b64encode(
                json.dumps(connector_update.resource_types).encode("utf-8")
            )
        elif field == "labels":
            self.labels = (
                base64.b64encode(
                    json.dumps(connector_update.labels).encode("utf-8")
                )
                if connector_update.labels
                else None
            )
        else:
            setattr(self, field, value)
    # The secret ID is always replaced, even when None.
    self.secret_id = secret_id
    self.updated = datetime.utcnow()
    return self

stack_schemas

SQL Model Implementations for Stacks.

StackCompositionSchema (SQLModel) pydantic-model

SQL Model for stack definitions.

Join table between Stacks and StackComponents.

Source code in zenml/zen_stores/schemas/stack_schemas.py
class StackCompositionSchema(SQLModel, table=True):
    """SQL Model for stack definitions.

    Join table between Stacks and StackComponents.
    """

    __tablename__ = "stack_composition"

    # First half of the composite primary key; rows are removed when the
    # referenced stack is deleted (ondelete CASCADE).
    stack_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="stack",  # TODO: how to reference `StackSchema.__tablename__`?
        source_column="stack_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    # Second half of the composite primary key; likewise cascades when the
    # referenced component is deleted.
    component_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="stack_component",  # TODO: how to reference `StackComponentSchema.__tablename__`?
        source_column="component_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
StackSchema (ShareableSchema) pydantic-model

SQL Model for stacks.

Source code in zenml/zen_stores/schemas/stack_schemas.py
class StackSchema(ShareableSchema, table=True):
    """SQL Model for stacks."""

    __tablename__ = "stack"

    # Owning workspace; deleting the workspace cascades to its stacks.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="stacks")

    # Owning user; the stack survives user deletion (SET NULL).
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="stacks")

    # Many-to-many link to components via the stack_composition join table.
    components: List["StackComponentSchema"] = Relationship(
        back_populates="stacks",
        link_model=StackCompositionSchema,
    )
    runs: List["PipelineRunSchema"] = Relationship(back_populates="stack")
    builds: List["PipelineBuildSchema"] = Relationship(back_populates="stack")
    deployments: List["PipelineDeploymentSchema"] = Relationship(
        back_populates="stack",
    )

    def update(
        self,
        stack_update: "StackUpdateModel",
        components: List["StackComponentSchema"],
    ) -> "StackSchema":
        """Updates a stack schema with a stack update model.

        Args:
            stack_update: `StackUpdateModel` to update the stack with.
            components: List of `StackComponentSchema` to update the stack with.

        Returns:
            The updated StackSchema.
        """
        for field, value in stack_update.dict(exclude_unset=True).items():
            if field == "components":
                # The pre-resolved component schemas replace the current set.
                self.components = components

            elif field == "user":
                # Ownership may be echoed in the update but never changed.
                assert self.user_id == value

            elif field == "workspace":
                assert self.workspace_id == value

            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(self) -> "StackResponseModel":
        """Converts the schema to a model.

        Returns:
            The converted model.
        """
        return StackResponseModel(
            id=self.id,
            name=self.name,
            user=self.user.to_model(True) if self.user else None,
            workspace=self.workspace.to_model(),
            is_shared=self.is_shared,
            # Components keyed by type, each wrapped in a one-element list.
            components={c.type: [c.to_model()] for c in self.components},
            created=self.created,
            updated=self.updated,
        )
to_model(self)

Converts the schema to a model.

Returns:

Type Description
StackResponseModel

The converted model.

Source code in zenml/zen_stores/schemas/stack_schemas.py
def to_model(self) -> "StackResponseModel":
    """Convert this stack schema into its response model.

    Returns:
        The converted model.
    """
    # Components are keyed by their type, each wrapped in a one-element list.
    component_map = {c.type: [c.to_model()] for c in self.components}
    owner = self.user.to_model(True) if self.user else None
    return StackResponseModel(
        id=self.id,
        name=self.name,
        user=owner,
        workspace=self.workspace.to_model(),
        is_shared=self.is_shared,
        components=component_map,
        created=self.created,
        updated=self.updated,
    )
update(self, stack_update, components)

Updates a stack schema with a stack update model.

Parameters:

Name Type Description Default
stack_update StackUpdateModel

StackUpdateModel to update the stack with.

required
components List[StackComponentSchema]

List of StackComponentSchema to update the stack with.

required

Returns:

Type Description
StackSchema

The updated StackSchema.

Source code in zenml/zen_stores/schemas/stack_schemas.py
def update(
    self,
    stack_update: "StackUpdateModel",
    components: List["StackComponentSchema"],
) -> "StackSchema":
    """Apply a `StackUpdateModel` to this schema in place.

    Args:
        stack_update: The update model whose explicitly-set fields are
            applied to the schema.
        components: Replacement list of component schemas, used when the
            update touches the stack's components.

    Returns:
        This schema, mutated with the requested changes.
    """
    changes = stack_update.dict(exclude_unset=True)
    for attr, new_value in changes.items():
        if attr == "components":
            self.components = components
        elif attr == "user":
            # Ownership is immutable; the update may only restate it.
            assert self.user_id == new_value
        elif attr == "workspace":
            # The workspace is immutable; the update may only restate it.
            assert self.workspace_id == new_value
        else:
            setattr(self, attr, new_value)

    self.updated = datetime.utcnow()
    return self

step_run_schemas

SQLModel implementation of step run tables.

StepRunInputArtifactSchema (SQLModel) pydantic-model

SQL Model that defines which artifacts are inputs to which step.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunInputArtifactSchema(SQLModel, table=True):
    """SQL Model that defines which artifacts are inputs to which step."""

    __tablename__ = "step_run_input_artifact"

    # Composite primary key: (step_id, artifact_id, name). Rows are removed
    # automatically when the referenced step run or artifact is deleted
    # (ondelete="CASCADE" on both foreign keys).
    step_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="step_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    step_run: StepRunSchema = Relationship(back_populates="input_artifacts")
    artifact_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ArtifactSchema.__tablename__,
        source_column="artifact_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    artifact: ArtifactSchema = Relationship(
        back_populates="input_to_step_runs"
    )
    # The input name under which the artifact is passed to the step; part of
    # the primary key, so the same artifact can feed several named inputs.
    name: str = Field(nullable=False, primary_key=True)
StepRunOutputArtifactSchema (SQLModel) pydantic-model

SQL Model that defines which artifacts are outputs of which step.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunOutputArtifactSchema(SQLModel, table=True):
    """SQL Model that defines which artifacts are outputs of which step."""

    __tablename__ = "step_run_output_artifact"

    # Composite primary key: (step_id, artifact_id). Rows are removed
    # automatically when the referenced step run or artifact is deleted.
    step_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="step_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    step_run: StepRunSchema = Relationship(back_populates="output_artifacts")
    artifact_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ArtifactSchema.__tablename__,
        source_column="artifact_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    artifact: ArtifactSchema = Relationship(
        back_populates="output_of_step_runs"
    )
    # The output name under which the step produced the artifact.
    # NOTE(review): unlike the input table, `name` is NOT part of the primary
    # key here — presumably each artifact is produced by a step at most once;
    # confirm against the write path.
    name: str
StepRunParentsSchema (SQLModel) pydantic-model

SQL Model that defines the order of steps.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunParentsSchema(SQLModel, table=True):
    """SQL Model that defines the order of steps."""

    __tablename__ = "step_run_parents"

    # Self-referential parent/child link table over `step_run`; composite
    # primary key (parent_id, child_id). Both sides cascade-delete with the
    # referenced step run. The explicit `primaryjoin` strings are required
    # to disambiguate the two foreign keys into the same target table.
    parent_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="parent_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    parent: StepRunSchema = Relationship(
        back_populates="children",
        sa_relationship_kwargs={
            "primaryjoin": "StepRunParentsSchema.parent_id == StepRunSchema.id"
        },
    )
    child_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="child_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    child: StepRunSchema = Relationship(
        back_populates="parents",
        sa_relationship_kwargs={
            "primaryjoin": "StepRunParentsSchema.child_id == StepRunSchema.id"
        },
    )
StepRunSchema (NamedSchema) pydantic-model

SQL Model for steps of pipeline runs.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunSchema(NamedSchema, table=True):
    """SQL Model for steps of pipeline runs."""

    __tablename__ = "step_run"

    # --- Foreign keys & relationships ---
    # Deleting the owning pipeline run cascades to its step runs.
    pipeline_run_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=PipelineRunSchema.__tablename__,
        source_column="pipeline_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    pipeline_run: "PipelineRunSchema" = Relationship(
        back_populates="step_runs"
    )
    # Self-referential link to the step run this one was cached from;
    # nulled out (not deleted) if the original step run disappears.
    original_step_run_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=__tablename__,
        source_column="original_step_run_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )

    # The creating user is optional; deleting the user keeps the step run
    # and nulls the reference.
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: Optional["UserSchema"] = Relationship(back_populates="step_runs")

    # Deleting the workspace cascades to its step runs.
    workspace_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=WorkspaceSchema.__tablename__,
        source_column="workspace_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    workspace: "WorkspaceSchema" = Relationship(back_populates="step_runs")

    # --- Scalar columns ---
    enable_cache: Optional[bool] = Field(nullable=True)
    enable_artifact_metadata: Optional[bool] = Field(nullable=True)
    code_hash: Optional[str] = Field(nullable=True)
    cache_key: Optional[str] = Field(nullable=True)
    start_time: Optional[datetime] = Field(nullable=True)
    end_time: Optional[datetime] = Field(nullable=True)
    status: ExecutionStatus
    entrypoint_name: str
    # JSON blobs; stored as MEDIUMTEXT on MySQL to allow large payloads.
    parameters: str = Field(
        sa_column=Column(
            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
                MEDIUMTEXT, "mysql"
            ),
            nullable=False,
        )
    )
    step_configuration: str = Field(
        sa_column=Column(
            String(length=MEDIUMTEXT_MAX_LENGTH).with_variant(
                MEDIUMTEXT, "mysql"
            ),
            nullable=False,
        )
    )
    caching_parameters: Optional[str] = Field(
        sa_column=Column(TEXT, nullable=True)
    )
    docstring: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    source_code: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    num_outputs: Optional[int]

    # --- Child collections; all deleted together with the step run ---
    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="step_run", sa_relationship_kwargs={"cascade": "delete"}
    )
    input_artifacts: List["StepRunInputArtifactSchema"] = Relationship(
        back_populates="step_run", sa_relationship_kwargs={"cascade": "delete"}
    )
    output_artifacts: List["StepRunOutputArtifactSchema"] = Relationship(
        back_populates="step_run", sa_relationship_kwargs={"cascade": "delete"}
    )
    logs: Optional["LogsSchema"] = Relationship(
        back_populates="step_run",
        sa_relationship_kwargs={"cascade": "delete", "uselist": False},
    )
    # Self-referential DAG edges via the `step_run_parents` link table; the
    # `primaryjoin` strings disambiguate the two FKs into this same table.
    parents: List["StepRunParentsSchema"] = Relationship(
        back_populates="child",
        sa_relationship_kwargs={
            "cascade": "delete",
            "primaryjoin": "StepRunParentsSchema.child_id == StepRunSchema.id",
        },
    )
    children: List["StepRunParentsSchema"] = Relationship(
        back_populates="parent",
        sa_relationship_kwargs={
            "cascade": "delete",
            "primaryjoin": "StepRunParentsSchema.parent_id == StepRunSchema.id",
        },
    )

    @classmethod
    def from_request(cls, request: StepRunRequestModel) -> "StepRunSchema":
        """Create a step run schema from a step run request model.

        Args:
            request: The step run request model.

        Returns:
            The step run schema.
        """
        step_config = request.config
        full_step_config = Step(spec=request.spec, config=request.config)
        return cls(
            name=request.name,
            pipeline_run_id=request.pipeline_run_id,
            original_step_run_id=request.original_step_run_id,
            workspace_id=request.workspace,
            user_id=request.user,
            enable_cache=step_config.enable_cache,
            enable_artifact_metadata=step_config.enable_artifact_metadata,
            code_hash=step_config.caching_parameters.get(
                STEP_SOURCE_PARAMETER_NAME
            ),
            cache_key=request.cache_key,
            start_time=request.start_time,
            end_time=request.end_time,
            entrypoint_name=step_config.name,
            # JSON is dumped with sorted keys so equal configs serialize to
            # identical strings (stable for hashing/comparison).
            parameters=json.dumps(
                step_config.parameters,
                default=pydantic_encoder,
                sort_keys=True,
            ),
            step_configuration=full_step_config.json(sort_keys=True),
            caching_parameters=json.dumps(
                step_config.caching_parameters,
                default=pydantic_encoder,
                sort_keys=True,
            ),
            docstring=request.docstring,
            source_code=request.source_code,
            num_outputs=len(step_config.outputs),
            status=request.status,
        )

    def to_model(
        self,
        parent_step_ids: List[UUID],
        input_artifacts: Dict[str, "ArtifactResponseModel"],
        output_artifacts: Dict[str, "ArtifactResponseModel"],
    ) -> StepRunResponseModel:
        """Convert a `StepRunSchema` to a `StepRunModel`.

        Args:
            parent_step_ids: The parent step ids to link to the step.
            input_artifacts: The input artifacts to link to the step.
            output_artifacts: The output artifacts to link to the step.

        Returns:
            The created StepRunModel.
        """
        metadata = {
            metadata_schema.key: metadata_schema.to_model()
            for metadata_schema in self.run_metadata
        }
        # The step spec/config are rehydrated from the stored JSON blob.
        full_step_config = Step.parse_raw(self.step_configuration)
        return StepRunResponseModel(
            id=self.id,
            name=self.name,
            pipeline_run_id=self.pipeline_run_id,
            original_step_run_id=self.original_step_run_id,
            workspace=self.workspace.to_model(),
            user=self.user.to_model() if self.user else None,
            parent_step_ids=parent_step_ids,
            cache_key=self.cache_key,
            start_time=self.start_time,
            end_time=self.end_time,
            config=full_step_config.config,
            spec=full_step_config.spec,
            status=self.status,
            docstring=self.docstring,
            source_code=self.source_code,
            created=self.created,
            updated=self.updated,
            inputs=input_artifacts,
            outputs=output_artifacts,
            metadata=metadata,
            logs=self.logs.to_model() if self.logs else None,
        )

    def update(self, step_update: StepRunUpdateModel) -> "StepRunSchema":
        """Update a step run schema with a step run update model.

        Args:
            step_update: The step run update model.

        Returns:
            The updated step run schema.
        """
        # Only `status` and `end_time` are updatable; all other fields in
        # the update model are deliberately ignored.
        for key, value in step_update.dict(
            exclude_unset=True, exclude_none=True
        ).items():
            if key == "status":
                self.status = value
            if key == "end_time":
                self.end_time = value

        self.updated = datetime.utcnow()

        return self
from_request(request) classmethod

Create a step run schema from a step run request model.

Parameters:

Name Type Description Default
request StepRunRequestModel

The step run request model.

required

Returns:

Type Description
StepRunSchema

The step run schema.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
@classmethod
def from_request(cls, request: StepRunRequestModel) -> "StepRunSchema":
    """Build a step run schema out of a step run request model.

    Args:
        request: The incoming step run request model.

    Returns:
        A freshly populated step run schema.
    """
    config = request.config
    full_step = Step(spec=request.spec, config=request.config)
    # JSON is dumped with sorted keys so equal configs serialize
    # identically.
    dump_kwargs = {"default": pydantic_encoder, "sort_keys": True}
    return cls(
        name=request.name,
        pipeline_run_id=request.pipeline_run_id,
        original_step_run_id=request.original_step_run_id,
        workspace_id=request.workspace,
        user_id=request.user,
        enable_cache=config.enable_cache,
        enable_artifact_metadata=config.enable_artifact_metadata,
        code_hash=config.caching_parameters.get(STEP_SOURCE_PARAMETER_NAME),
        cache_key=request.cache_key,
        start_time=request.start_time,
        end_time=request.end_time,
        entrypoint_name=config.name,
        parameters=json.dumps(config.parameters, **dump_kwargs),
        step_configuration=full_step.json(sort_keys=True),
        caching_parameters=json.dumps(
            config.caching_parameters, **dump_kwargs
        ),
        docstring=request.docstring,
        source_code=request.source_code,
        num_outputs=len(config.outputs),
        status=request.status,
    )
to_model(self, parent_step_ids, input_artifacts, output_artifacts)

Convert a StepRunSchema to a StepRunModel.

Parameters:

Name Type Description Default
parent_step_ids List[uuid.UUID]

The parent step ids to link to the step.

required
input_artifacts Dict[str, ArtifactResponseModel]

The input artifacts to link to the step.

required
output_artifacts Dict[str, ArtifactResponseModel]

The output artifacts to link to the step.

required

Returns:

Type Description
StepRunResponseModel

The created StepRunModel.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
def to_model(
    self,
    parent_step_ids: List[UUID],
    input_artifacts: Dict[str, "ArtifactResponseModel"],
    output_artifacts: Dict[str, "ArtifactResponseModel"],
) -> StepRunResponseModel:
    """Convert this step run schema into a `StepRunResponseModel`.

    Args:
        parent_step_ids: IDs of the parent steps to attach.
        input_artifacts: Input artifacts to attach, keyed by input name.
        output_artifacts: Output artifacts to attach, keyed by output name.

    Returns:
        The populated response model.
    """
    run_metadata = {}
    for metadata_schema in self.run_metadata:
        run_metadata[metadata_schema.key] = metadata_schema.to_model()
    # Rehydrate the step spec/config from the stored JSON blob.
    full_step = Step.parse_raw(self.step_configuration)
    return StepRunResponseModel(
        id=self.id,
        name=self.name,
        pipeline_run_id=self.pipeline_run_id,
        original_step_run_id=self.original_step_run_id,
        workspace=self.workspace.to_model(),
        user=self.user.to_model() if self.user else None,
        parent_step_ids=parent_step_ids,
        cache_key=self.cache_key,
        start_time=self.start_time,
        end_time=self.end_time,
        config=full_step.config,
        spec=full_step.spec,
        status=self.status,
        docstring=self.docstring,
        source_code=self.source_code,
        created=self.created,
        updated=self.updated,
        inputs=input_artifacts,
        outputs=output_artifacts,
        metadata=run_metadata,
        logs=self.logs.to_model() if self.logs else None,
    )
update(self, step_update)

Update a step run schema with a step run update model.

Parameters:

Name Type Description Default
step_update StepRunUpdateModel

The step run update model.

required

Returns:

Type Description
StepRunSchema

The updated step run schema.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
def update(self, step_update: StepRunUpdateModel) -> "StepRunSchema":
    """Apply a `StepRunUpdateModel` to this schema in place.

    Args:
        step_update: The update model to apply.

    Returns:
        This schema, with its status and/or end time updated.
    """
    # Only `status` and `end_time` are updatable; all other fields in the
    # update model are deliberately ignored.
    changes = step_update.dict(exclude_unset=True, exclude_none=True)
    if "status" in changes:
        self.status = changes["status"]
    if "end_time" in changes:
        self.end_time = changes["end_time"]

    self.updated = datetime.utcnow()

    return self

team_schemas

SQLModel implementation of team tables.

TeamAssignmentSchema (SQLModel) pydantic-model

SQL Model for team assignments.

Source code in zenml/zen_stores/schemas/team_schemas.py
class TeamAssignmentSchema(SQLModel, table=True):
    """SQL Model for team assignments."""

    __tablename__ = "team_assignment"

    # Many-to-many link table between users and teams; composite primary
    # key (user_id, team_id). Rows cascade-delete with either endpoint.
    user_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="user",  # TODO: how to reference `UserSchema.__tablename__`?
        source_column="user_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    team_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="team",  # TODO: how to reference `TeamSchema.__tablename__`?
        source_column="team_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
TeamSchema (NamedSchema) pydantic-model

SQL Model for teams.

Source code in zenml/zen_stores/schemas/team_schemas.py
class TeamSchema(NamedSchema, table=True):
    """SQL Model for teams."""

    __tablename__ = "team"

    # Many-to-many membership through the `team_assignment` link table.
    users: List["UserSchema"] = Relationship(
        back_populates="teams", link_model=TeamAssignmentSchema
    )
    # Role assignments are owned by the team and deleted with it.
    assigned_roles: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="team", sa_relationship_kwargs={"cascade": "delete"}
    )

    def update(self, team_update: TeamUpdateModel) -> "TeamSchema":
        """Update a `TeamSchema` with a `TeamUpdateModel`.

        Args:
            team_update: The `TeamUpdateModel` to update the schema with.

        Returns:
            The updated `TeamSchema`.
        """
        for field, value in team_update.dict(exclude_unset=True).items():
            if field == "users":
                # Membership changes are handled elsewhere; skipped here.
                pass
            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(self, _block_recursion: bool = False) -> TeamResponseModel:
        """Convert a `TeamSchema` to a `TeamResponseModel`.

        Args:
            _block_recursion: Don't recursively fill attributes

        Returns:
            The converted `TeamResponseModel`.
        """
        if _block_recursion:
            return TeamResponseModel(
                id=self.id,
                name=self.name,
                created=self.created,
                updated=self.updated,
                users=[],
            )
        else:
            return TeamResponseModel(
                id=self.id,
                name=self.name,
                created=self.created,
                updated=self.updated,
                # NOTE(review): `_block_recursion=False` lets each user fill
                # its own nested teams (`UserSchema.to_model` blocks one
                # level down with `True`) — confirm this extra depth is
                # intended rather than a copy-paste of the wrong flag.
                users=[u.to_model(_block_recursion=False) for u in self.users],
            )
to_model(self, _block_recursion=False)

Convert a TeamSchema to a TeamResponseModel.

Parameters:

Name Type Description Default
_block_recursion bool

Don't recursively fill attributes

False

Returns:

Type Description
TeamResponseModel

The converted TeamResponseModel.

Source code in zenml/zen_stores/schemas/team_schemas.py
def to_model(self, _block_recursion: bool = False) -> TeamResponseModel:
    """Convert this team schema into a `TeamResponseModel`.

    Args:
        _block_recursion: When True, leave nested attributes unfilled.

    Returns:
        The populated response model.
    """
    if _block_recursion:
        members = []
    else:
        members = [u.to_model(_block_recursion=False) for u in self.users]
    return TeamResponseModel(
        id=self.id,
        name=self.name,
        created=self.created,
        updated=self.updated,
        users=members,
    )
update(self, team_update)

Update a TeamSchema with a TeamUpdateModel.

Parameters:

Name Type Description Default
team_update TeamUpdateModel

The TeamUpdateModel to update the schema with.

required

Returns:

Type Description
TeamSchema

The updated TeamSchema.

Source code in zenml/zen_stores/schemas/team_schemas.py
def update(self, team_update: TeamUpdateModel) -> "TeamSchema":
    """Apply a `TeamUpdateModel` to this schema in place.

    Args:
        team_update: The update model whose explicitly-set fields are
            applied to the schema.

    Returns:
        This schema, mutated with the requested changes.
    """
    for attr, new_value in team_update.dict(exclude_unset=True).items():
        # Team membership is managed elsewhere; skip the `users` field.
        if attr == "users":
            continue
        setattr(self, attr, new_value)

    self.updated = datetime.utcnow()
    return self

user_schemas

SQLModel implementation of user tables.

UserSchema (NamedSchema) pydantic-model

SQL Model for users.

Source code in zenml/zen_stores/schemas/user_schemas.py
class UserSchema(NamedSchema, table=True):
    """SQL Model for users."""

    __tablename__ = "user"

    # --- Scalar columns ---
    full_name: str
    email: Optional[str] = Field(nullable=True)
    active: bool
    # Stored hashed, never in plain text (see `from_request`/`update`).
    password: Optional[str] = Field(nullable=True)
    activation_token: Optional[str] = Field(nullable=True)
    hub_token: Optional[str] = Field(nullable=True)
    email_opted_in: Optional[bool] = Field(nullable=True)

    # --- Relationships ---
    # Team membership through the `team_assignment` link table.
    teams: List["TeamSchema"] = Relationship(
        back_populates="users", link_model=TeamAssignmentSchema
    )
    # Role assignments are deleted together with the user.
    assigned_roles: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="user", sa_relationship_kwargs={"cascade": "delete"}
    )
    # Resources created by the user; these survive user deletion (no
    # delete cascade configured here), except for secrets below.
    stacks: List["StackSchema"] = Relationship(back_populates="user")
    components: List["StackComponentSchema"] = Relationship(
        back_populates="user",
    )
    flavors: List["FlavorSchema"] = Relationship(back_populates="user")
    pipelines: List["PipelineSchema"] = Relationship(back_populates="user")
    schedules: List["ScheduleSchema"] = Relationship(
        back_populates="user",
    )
    runs: List["PipelineRunSchema"] = Relationship(back_populates="user")
    step_runs: List["StepRunSchema"] = Relationship(back_populates="user")
    builds: List["PipelineBuildSchema"] = Relationship(back_populates="user")
    artifacts: List["ArtifactSchema"] = Relationship(back_populates="user")
    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="user"
    )
    # Secrets are owned by the user and deleted with them.
    secrets: List["SecretSchema"] = Relationship(
        back_populates="user",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    deployments: List["PipelineDeploymentSchema"] = Relationship(
        back_populates="user",
    )
    code_repositories: List["CodeRepositorySchema"] = Relationship(
        back_populates="user",
    )
    service_connectors: List["ServiceConnectorSchema"] = Relationship(
        back_populates="user",
    )

    @classmethod
    def from_request(cls, model: UserRequestModel) -> "UserSchema":
        """Create a `UserSchema` from a `UserModel`.

        Args:
            model: The `UserModel` from which to create the schema.

        Returns:
            The created `UserSchema`.
        """
        return cls(
            name=model.name,
            full_name=model.full_name,
            active=model.active,
            # Hash credentials before persisting them.
            password=model.create_hashed_password(),
            activation_token=model.create_hashed_activation_token(),
        )

    def update(self, user_update: UserUpdateModel) -> "UserSchema":
        """Update a `UserSchema` from a `UserUpdateModel`.

        Args:
            user_update: The `UserUpdateModel` from which to update the schema.

        Returns:
            The updated `UserSchema`.
        """
        for field, value in user_update.dict(exclude_unset=True).items():
            # Credentials are stored hashed, never the raw value.
            if field == "password":
                setattr(self, field, user_update.create_hashed_password())
            elif field == "activation_token":
                setattr(
                    self, field, user_update.create_hashed_activation_token()
                )
            else:
                setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(
        self, _block_recursion: bool = False, include_private: bool = False
    ) -> UserResponseModel:
        """Convert a `UserSchema` to a `UserResponseModel`.

        Args:
            _block_recursion: Don't recursively fill attributes.
            include_private: Whether to include the user's private
                information; this is used to limit the amount of data one
                can get about other users.

        Returns:
            The converted `UserResponseModel`.
        """
        if _block_recursion:
            return UserResponseModel(
                id=self.id,
                name=self.name,
                active=self.active,
                email_opted_in=self.email_opted_in,
                email=self.email if include_private else None,
                hub_token=self.hub_token if include_private else None,
                full_name=self.full_name,
                created=self.created,
                updated=self.updated,
            )
        else:
            return UserResponseModel(
                id=self.id,
                name=self.name,
                active=self.active,
                email_opted_in=self.email_opted_in,
                email=self.email if include_private else None,
                hub_token=self.hub_token if include_private else None,
                # Block recursion one level down to avoid user->team->user
                # expansion.
                teams=[t.to_model(_block_recursion=True) for t in self.teams],
                full_name=self.full_name,
                created=self.created,
                updated=self.updated,
                roles=[ra.role.to_model() for ra in self.assigned_roles],
            )
from_request(model) classmethod

Create a UserSchema from a UserModel.

Parameters:

Name Type Description Default
model UserRequestModel

The UserModel from which to create the schema.

required

Returns:

Type Description
UserSchema

The created UserSchema.

Source code in zenml/zen_stores/schemas/user_schemas.py
@classmethod
def from_request(cls, model: UserRequestModel) -> "UserSchema":
    """Build a `UserSchema` from a user request model.

    Args:
        model: The request model to convert.

    Returns:
        The newly created schema.
    """
    # Credentials are hashed before they are persisted.
    hashed_password = model.create_hashed_password()
    hashed_token = model.create_hashed_activation_token()
    return cls(
        name=model.name,
        full_name=model.full_name,
        active=model.active,
        password=hashed_password,
        activation_token=hashed_token,
    )
to_model(self, _block_recursion=False, include_private=False)

Convert a UserSchema to a UserResponseModel.

Parameters:

Name Type Description Default
_block_recursion bool

Don't recursively fill attributes

False
include_private bool

Whether to include the user's private information; this is used to limit the amount of data one can get about other users

False

Returns:

Type Description
UserResponseModel

The converted UserResponseModel.

Source code in zenml/zen_stores/schemas/user_schemas.py
def to_model(
    self, _block_recursion: bool = False, include_private: bool = False
) -> UserResponseModel:
    """Convert this user schema into a `UserResponseModel`.

    Args:
        _block_recursion: When True, leave nested attributes unfilled.
        include_private: Whether to include the user's private details
            (email, hub token); used to limit what one user can learn
            about another.

    Returns:
        The populated response model.
    """
    shared_fields = dict(
        id=self.id,
        name=self.name,
        active=self.active,
        email_opted_in=self.email_opted_in,
        email=self.email if include_private else None,
        hub_token=self.hub_token if include_private else None,
        full_name=self.full_name,
        created=self.created,
        updated=self.updated,
    )
    if _block_recursion:
        return UserResponseModel(**shared_fields)
    return UserResponseModel(
        # Block recursion one level down to avoid user->team->user loops.
        teams=[t.to_model(_block_recursion=True) for t in self.teams],
        roles=[ra.role.to_model() for ra in self.assigned_roles],
        **shared_fields,
    )
update(self, user_update)

Update a UserSchema from a UserUpdateModel.

Parameters:

Name Type Description Default
user_update UserUpdateModel

The UserUpdateModel from which to update the schema.

required

Returns:

Type Description
UserSchema

The updated UserSchema.

Source code in zenml/zen_stores/schemas/user_schemas.py
def update(self, user_update: UserUpdateModel) -> "UserSchema":
    """Apply a `UserUpdateModel` to this schema in place.

    Args:
        user_update: The update model whose explicitly-set fields are
            applied to the schema.

    Returns:
        This schema, mutated with the requested changes.
    """
    for attr, new_value in user_update.dict(exclude_unset=True).items():
        # Credentials are never stored raw: hash them before persisting.
        if attr == "password":
            new_value = user_update.create_hashed_password()
        elif attr == "activation_token":
            new_value = user_update.create_hashed_activation_token()
        setattr(self, attr, new_value)

    self.updated = datetime.utcnow()
    return self

workspace_schemas

SQL Model Implementations for Workspaces.

WorkspaceSchema (NamedSchema) pydantic-model

SQL Model for workspaces.

Source code in zenml/zen_stores/schemas/workspace_schemas.py
class WorkspaceSchema(NamedSchema, table=True):
    """SQL Model for workspaces.

    A workspace is the parent of the ZenML resources modeled by the
    relationships below; each relationship is configured to cascade-delete
    its rows when the workspace itself is deleted.
    """

    __tablename__ = "workspace"

    # Free-form, human-readable description of the workspace.
    description: str

    user_role_assignments: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    # NOTE(review): this relationship uses "all, delete" while every sibling
    # relationship uses plain "delete" — confirm the asymmetry is intentional.
    team_role_assignments: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "all, delete"},
    )
    stacks: List["StackSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    components: List["StackComponentSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    flavors: List["FlavorSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    pipelines: List["PipelineSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    schedules: List["ScheduleSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    step_runs: List["StepRunSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    builds: List["PipelineBuildSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    artifacts: List["ArtifactSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    run_metadata: List["RunMetadataSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    secrets: List["SecretSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    deployments: List["PipelineDeploymentSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    code_repositories: List["CodeRepositorySchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )
    service_connectors: List["ServiceConnectorSchema"] = Relationship(
        back_populates="workspace",
        sa_relationship_kwargs={"cascade": "delete"},
    )

    @classmethod
    def from_request(
        cls, workspace: WorkspaceRequestModel
    ) -> "WorkspaceSchema":
        """Create a `WorkspaceSchema` from a `WorkspaceRequestModel`.

        Args:
            workspace: The `WorkspaceRequestModel` from which to create the
                schema.

        Returns:
            The created `WorkspaceSchema`.
        """
        return cls(name=workspace.name, description=workspace.description)

    def update(
        self, workspace_update: WorkspaceUpdateModel
    ) -> "WorkspaceSchema":
        """Update a `WorkspaceSchema` from a `WorkspaceUpdateModel`.

        Args:
            workspace_update: The `WorkspaceUpdateModel` from which to update the
                schema.

        Returns:
            The updated `WorkspaceSchema`.
        """
        # Only fields explicitly set on the update model are applied.
        for field, value in workspace_update.dict(exclude_unset=True).items():
            setattr(self, field, value)

        self.updated = datetime.utcnow()
        return self

    def to_model(self) -> WorkspaceResponseModel:
        """Convert a `WorkspaceSchema` to a `WorkspaceResponseModel`.

        Returns:
            The converted `WorkspaceResponseModel`.
        """
        return WorkspaceResponseModel(
            id=self.id,
            name=self.name,
            description=self.description,
            created=self.created,
            updated=self.updated,
        )
from_request(workspace) classmethod

Create a WorkspaceSchema from a WorkspaceRequestModel.

Parameters:

Name Type Description Default
workspace WorkspaceRequestModel

The WorkspaceRequestModel from which to create the schema.

required

Returns:

Type Description
WorkspaceSchema

The created WorkspaceSchema.

Source code in zenml/zen_stores/schemas/workspace_schemas.py
@classmethod
def from_request(
    cls, workspace: WorkspaceRequestModel
) -> "WorkspaceSchema":
    """Create a `WorkspaceSchema` from a `WorkspaceRequestModel`.

    Args:
        workspace: The `WorkspaceRequestModel` from which to create the
            schema.

    Returns:
        The created `WorkspaceSchema`.
    """
    return cls(name=workspace.name, description=workspace.description)
to_model(self)

Convert a WorkspaceSchema to a WorkspaceResponseModel.

Returns:

Type Description
WorkspaceResponseModel

The converted WorkspaceResponseModel.

Source code in zenml/zen_stores/schemas/workspace_schemas.py
def to_model(self) -> WorkspaceResponseModel:
    """Convert a `WorkspaceSchema` to a `WorkspaceResponseModel`.

    Returns:
        The converted `WorkspaceResponseModel`.
    """
    # Collect the schema columns first, then build the response model in a
    # single call.
    model_fields = {
        "id": self.id,
        "name": self.name,
        "description": self.description,
        "created": self.created,
        "updated": self.updated,
    }
    return WorkspaceResponseModel(**model_fields)
update(self, workspace_update)

Update a WorkspaceSchema from a WorkspaceUpdateModel.

Parameters:

Name Type Description Default
workspace_update WorkspaceUpdateModel

The WorkspaceUpdateModel from which to update the schema.

required

Returns:

Type Description
WorkspaceSchema

The updated WorkspaceSchema.

Source code in zenml/zen_stores/schemas/workspace_schemas.py
def update(
    self, workspace_update: WorkspaceUpdateModel
) -> "WorkspaceSchema":
    """Update a `WorkspaceSchema` from a `WorkspaceUpdateModel`.

    Args:
        workspace_update: The `WorkspaceUpdateModel` from which to update the
            schema.

    Returns:
        The updated `WorkspaceSchema`.
    """
    # Apply only the fields that were explicitly set on the update model.
    requested_changes = workspace_update.dict(exclude_unset=True)
    for field_name, new_value in requested_changes.items():
        setattr(self, field_name, new_value)

    self.updated = datetime.utcnow()
    return self

secrets_stores special

Centralized secrets management.

aws_secrets_store

AWS Secrets Store implementation.

AWSSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the AWS Secrets Manager API.

This secrets store implementation uses the AWS Secrets Manager API to store secrets. It allows a single AWS Secrets Manager region "instance" to be shared with other ZenML deployments as well as other third party users and applications.

Here are some implementation highlights:

  • the name/ID of an AWS secret is derived from the ZenML secret UUID and a zenml prefix in the form zenml/{zenml_secret_uuid}. This clearly identifies a secret as being managed by ZenML in the AWS console.

  • the Secrets Store also makes heavy use of AWS secret tags to store all the metadata associated with a ZenML secret (e.g. the secret name, scope, user and workspace) and to filter secrets by these metadata. The zenml tag in particular is used to identify and group all secrets that belong to the same ZenML deployment.

  • all secret key-values configured in a ZenML secret are stored as a single JSON string value in the AWS secret value.

  • when a user or workspace is deleted, the secrets associated with it are deleted automatically via registered event handlers.

Known challenges and limitations:

  • there is a known problem with the AWS Secrets Manager API that can cause the list_secrets method to return stale data for a long time (seconds) after a secret is created or updated. The AWS secrets store tries to mitigate this problem by waiting for a maximum configurable number of seconds after creating or updating a secret until the changes are reflected in the list_secrets AWS content. However, this is not a perfect solution, because it blocks ZenML server API threads while waiting. This can be disabled by setting the secret_list_refresh_timeout configuration parameter to zero.

  • only updating the secret values is reflected in the secret's updated timestamp. Updating the secret metadata (e.g. name, scope, user or workspace) does not update the secret's updated timestamp. This is a limitation of the AWS Secrets Manager API (updating AWS tags does not update the secret's updated timestamp).

Attributes:

Name Type Description
config

The configuration of the AWS secrets store.

TYPE

The type of the store.

CONFIG_TYPE

The type of the store configuration.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
class AWSSecretsStore(BaseSecretsStore):
    """Secrets store implementation that uses the AWS Secrets Manager API.

    This secrets store implementation uses the AWS Secrets Manager API to
    store secrets. It allows a single AWS Secrets Manager region "instance" to
    be shared with other ZenML deployments as well as other third party users
    and applications.

    Here are some implementation highlights:

    * the name/ID of an AWS secret is derived from the ZenML secret UUID and a
    `zenml` prefix in the form `zenml/{zenml_secret_uuid}`. This clearly
    identifies a secret as being managed by ZenML in the AWS console.

    * the Secrets Store also makes heavy use of AWS secret tags to store all the
    metadata associated with a ZenML secret (e.g. the secret name, scope, user
    and workspace) and to filter secrets by these metadata. The `zenml` tag in
    particular is used to identify and group all secrets that belong to the same
    ZenML deployment.

    * all secret key-values configured in a ZenML secret are stored as a single
    JSON string value in the AWS secret value.

    * when a user or workspace is deleted, the secrets associated with it are
    deleted automatically via registered event handlers.


    Known challenges and limitations:

    * there is a known problem with the AWS Secrets Manager API that can cause
    the `list_secrets` method to return stale data for a long time (seconds)
    after a secret is created or updated. The AWS secrets store tries to
    mitigate this problem by waiting for a maximum configurable number of
    seconds after creating or updating a secret until the changes are reflected
    in the `list_secrets` AWS content. However, this is not a perfect solution,
    because it blocks ZenML server API threads while waiting. This can be
    disabled by setting the `secret_list_refresh_timeout` configuration
    parameter to zero.

    * only updating the secret values is reflected in the secret's `updated`
    timestamp. Updating the secret metadata (e.g. name, scope, user or
    workspace) does not update the secret's `updated` timestamp. This is a
    limitation of the AWS Secrets Manager API (updating AWS tags does not update
    the secret's `updated` timestamp).


    Attributes:
        config: The configuration of the AWS secrets store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
    """

    config: AWSSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.AWS
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = AWSSecretsStoreConfiguration

    _client: Optional[Any] = None

    @property
    def client(self) -> Any:
        """Lazily construct and cache the AWS Secrets Manager client.

        Returns:
            The AWS Secrets Manager client.
        """
        if self._client is None:
            cfg = self.config

            def _unwrap(value: Any) -> Optional[str]:
                # Credentials are held as secret wrapper values; unwrap them
                # only when they were actually configured.
                return value.get_secret_value() if value else None

            # Build the client with the credentials from the configuration,
            # if provided; otherwise boto3 falls back to its own defaults.
            self._client = boto3.client(
                "secretsmanager",
                region_name=cfg.region_name,
                aws_access_key_id=_unwrap(cfg.aws_access_key_id),
                aws_secret_access_key=_unwrap(cfg.aws_secret_access_key),
                aws_session_token=_unwrap(cfg.aws_session_token),
            )
        return self._client

    # ====================================
    # Secrets Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the AWS secrets store."""
        logger.debug("Initializing AWSSecretsStore")

        # Initialize the AWS client early, just to catch any configuration or
        # authentication errors early, before the Secrets Store is used.
        # NOTE(review): boto3 may defer actual authentication until the first
        # API call, so this likely surfaces configuration errors only — confirm.
        _ = self.client

    # ------
    # Secrets
    # ------

    @staticmethod
    def _validate_aws_secret_name(name: str) -> None:
        """Validate a secret name.

        AWS secret names must contain only alphanumeric characters and the
        characters /_+=.@-. The `/` character is only used internally to
        implement the global namespace sharing scheme.

        Given that the ZenML secret name is stored as an AWS secret tag, the
        maximum value length is also restricted to 255 characters.

        Args:
            name: the secret name

        Raises:
            ValueError: if the secret name is invalid
        """
        # `/` is deliberately absent from the allowed set: it is reserved for
        # the internal `zenml/<uuid>` namespacing scheme.
        allowed_chars = re.compile(r"[a-zA-Z0-9_+=\.@\-]*")
        if allowed_chars.fullmatch(name) is None:
            raise ValueError(
                f"Invalid secret name '{name}'. Must contain only alphanumeric "
                f"characters and the characters _+=.@-."
            )

        # The name is stored as an AWS tag value, which caps it at 255 chars.
        if len(name) > 255:
            raise ValueError(
                f"Invalid secret name '{name}'. The maximum length is 255 "
                f"characters."
            )

    @staticmethod
    def _get_aws_secret_id(
        secret_id: UUID,
    ) -> str:
        """Get the AWS secret ID corresponding to a ZenML secret ID.

        The convention used for AWS secret names is to use the ZenML
        secret UUID prefixed with `zenml` as the AWS secret name,
        i.e. `zenml/<secret_uuid>`.

        Args:
            secret_id: The ZenML secret ID.

        Returns:
            The AWS secret name.
        """
        # Join the prefix and the UUID with the reserved `/` separator.
        return "/".join((AWS_ZENML_SECRET_NAME_PREFIX, str(secret_id)))

    def _convert_aws_secret(
        self,
        tags: List[Dict[str, str]],
        created: datetime,
        updated: datetime,
        values: Optional[str] = None,
    ) -> SecretResponseModel:
        """Create a ZenML secret model from data stored in an AWS secret.

        If the AWS secret cannot be converted, the method acts as if the
        secret does not exist and raises a KeyError.

        Args:
            tags: The AWS secret tags.
            created: The AWS secret creation time.
            updated: The AWS secret last updated time.
            values: The AWS secret values encoded as a JSON string (optional).

        Returns:
            The ZenML secret.
        """
        # Flatten the AWS tag list into a plain key -> value mapping.
        metadata: Dict[str, str] = {}
        for tag in tags:
            metadata[tag["Key"]] = tag["Value"]

        # Decode the JSON payload only when a non-empty value was supplied.
        secret_values = json.loads(values) if values else None

        return self._create_secret_from_metadata(
            metadata=metadata,
            created=created,
            updated=updated,
            values=secret_values,
        )

    @staticmethod
    def _get_aws_secret_tags(
        metadata: Dict[str, str],
    ) -> List[Dict[str, str]]:
        """Convert ZenML secret metadata to AWS secret tags.

        Args:
            metadata: The ZenML secret metadata.

        Returns:
            The AWS secret tags.
        """
        # Each metadata entry becomes one Key/Value tag record; values are
        # stringified before being handed to AWS.
        return [
            {"Key": key, "Value": str(value)}
            for key, value in metadata.items()
        ]

    @staticmethod
    def _get_aws_secret_filters(
        metadata: Dict[str, str],
    ) -> List[Dict[str, Any]]:
        """Convert ZenML secret metadata to AWS secret filters.

        Args:
            metadata: The ZenML secret metadata.

        Returns:
            The AWS secret filters: a flat list of `tag-key` / `tag-value`
            filter entries whose `Values` fields are lists of strings.
        """
        # NOTE: the declared return type is `List[Dict[str, Any]]` (not
        # `Dict[str, str]`) because the "Values" entry maps to a list.
        aws_filters: List[Dict[str, Any]] = []
        for k, v in metadata.items():
            # Each metadata entry contributes one filter on the tag key and
            # one filter on the (stringified) tag value.
            aws_filters.append(
                {
                    "Key": "tag-key",
                    "Values": [
                        k,
                    ],
                }
            )
            aws_filters.append(
                {
                    "Key": "tag-value",
                    "Values": [
                        str(v),
                    ],
                }
            )

        return aws_filters

    def _wait_for_secret_to_propagate(
        self, aws_secret_id: str, tags: List[Dict[str, str]]
    ) -> None:
        """Wait for an AWS secret to be refreshed in the list of secrets.

        The AWS Secrets Manager does not immediately reflect newly created
        and updated secrets in the `list_secrets` API. It is important that we
        wait for the secret to be refreshed in the `list_secrets` API
        before returning from create_secret/update_secret, otherwise the secret
        will not be available to the user. We also rely on `list_secrets`
        to enforce the scope rules, but given that the ZenML server runs
        requests in separate threads, it is not entirely possible to
        guarantee them.

        Args:
            aws_secret_id: The AWS secret ID.
            tags: The AWS secret tags that are expected to be present in the
                `list_secrets` response.
        """
        # A zero or negative timeout disables the wait entirely.
        if self.config.secret_list_refresh_timeout <= 0:
            return

        # We wait for the secret to be available in the `list_secrets` API.
        # Each iteration polls once and sleeps one second on failure, so the
        # configured timeout is roughly measured in seconds.
        for _ in range(self.config.secret_list_refresh_timeout):
            logger.debug(f"Waiting for secret {aws_secret_id} to be listed...")
            secret_exists = False
            try:
                secrets = self.client.list_secrets(
                    Filters=[{"Key": "name", "Values": [aws_secret_id]}]
                )
                if len(secrets["SecretList"]) > 0:
                    listed_secret = secrets["SecretList"][0]
                    # The supplied tags must exactly match those reported in the
                    # `list_secrets` response.
                    secret_exists = all(
                        tag in listed_secret["Tags"] for tag in tags
                    )
            except ClientError as e:
                # Transient listing errors are treated the same as "not yet
                # visible" and are simply retried.
                logger.warning(
                    f"Error checking if secret {aws_secret_id} is listed: "
                    f"{e}. Retrying..."
                )

            if not secret_exists:
                logger.debug(
                    f"Secret {aws_secret_id} not yet listed. Retrying..."
                )
                time.sleep(1)
            else:
                logger.debug(f"Secret {aws_secret_id} listed.")
                break
        else:
            # The for-loop's `else` branch only runs when all retries were
            # exhausted without a `break`, i.e. the secret never showed up.
            logger.warning(
                f"Secret {aws_secret_id} not updated in `list_secrets` "
                f"after {self.config.secret_list_refresh_timeout} seconds. "
            )

    @track(AnalyticsEvent.CREATED_SECRET, v2=True)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.

        Raises:
            EntityExistsError: If a secret with the same name already exists
                in the same scope.
            RuntimeError: If the AWS Secrets Manager API returns an unexpected
                error.
        """
        # Reject names that cannot be stored as AWS secret tag values up front.
        self._validate_aws_secret_name(secret.name)
        user, workspace = self._validate_user_and_workspace(
            secret.user, secret.workspace
        )

        # Check if a secret with the same name already exists in the same
        # scope.
        secret_exists, msg = self._check_secret_scope(
            secret_name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
        )
        if secret_exists:
            raise EntityExistsError(msg)

        # Generate a new UUID for the secret
        secret_id = uuid.uuid4()
        aws_secret_id = self._get_aws_secret_id(secret_id)
        # All secret key-value pairs are serialized into a single JSON string
        # stored as the AWS secret value.
        secret_value = json.dumps(secret.secret_values)

        # Convert the ZenML secret metadata to AWS tags
        metadata = self._get_secret_metadata_for_secret(
            secret, secret_id=secret_id
        )
        tags = self._get_aws_secret_tags(metadata)

        try:
            self.client.create_secret(
                Name=aws_secret_id,
                SecretString=secret_value,
                Tags=tags,
            )
            # We need a separate AWS API call to get the secret creation
            # date, since the create_secret API does not return it.
            describe_secret_response = self.client.describe_secret(
                SecretId=aws_secret_id
            )
        except ClientError as e:
            raise RuntimeError(f"Error creating secret: {e}")

        logger.debug("Created AWS secret: %s", aws_secret_id)

        # Block until the new secret shows up in `list_secrets`; see
        # `_wait_for_secret_to_propagate` for the rationale.
        self._wait_for_secret_to_propagate(aws_secret_id, tags=tags)

        # The timestamps come from the describe_secret response above.
        secret_model = SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=workspace,
            user=user,
            values=secret.secret_values,
            created=describe_secret_response["CreatedDate"],
            updated=describe_secret_response["LastChangedDate"],
        )

        return secret_model

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the AWS Secrets Manager API returns an unexpected
                error.
        """
        aws_secret_id = self._get_aws_secret_id(secret_id)

        try:
            get_secret_value_response = self.client.get_secret_value(
                SecretId=aws_secret_id
            )
            # We need a separate AWS API call to get the AWS secret tags which
            # contain the ZenML secret metadata, since the get_secret_value API
            # does not return them.
            describe_secret_response = self.client.describe_secret(
                SecretId=aws_secret_id
            )
        except ClientError as e:
            if e.response["Error"]["Code"] == "ResourceNotFoundException":
                raise KeyError(f"Secret with ID {secret_id} not found")

            # A secret that is marked for deletion is treated exactly like a
            # missing secret.
            if (
                e.response["Error"]["Code"] == "InvalidRequestException"
                and "marked for deletion" in e.response["Error"]["Message"]
            ):
                raise KeyError(f"Secret with ID {secret_id} not found")

            raise RuntimeError(
                f"Error fetching secret with ID {secret_id} {e}"
            )

        # The _convert_aws_secret method raises a KeyError if the
        # secret is tied to a workspace or user that no longer exists. Here we
        # simply pass the exception up the stack, as if the secret was not found
        # in the first place, knowing that it will be cascade-deleted soon.
        return self._convert_aws_secret(
            tags=describe_secret_response["Tags"],
            created=describe_secret_response["CreatedDate"],
            updated=describe_secret_response["LastChangedDate"],
            values=get_secret_value_response["SecretString"],
        )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.

        Raises:
            ValueError: If the filter contains an out-of-bounds page number.
            RuntimeError: If the AWS Secrets Manager API returns an unexpected
                error.
        """
        # The AWS Secrets Manager API does not natively support the entire
        # range of filtering, sorting and pagination options that ZenML
        # supports. The implementation of this method is therefore a bit
        # involved. We try to make use of the AWS filtering API as much as
        # possible to reduce the number of secrets that we need to fetch, then
        # we apply the rest of filtering, sorting and pagination on
        # the client side.

        metadata_args: Dict[str, Any] = {}
        if secret_filter_model.logical_operator == LogicalOperators.AND:
            # We can only filter on the AWS server side if we have an AND
            # logical operator. Otherwise, we need to filter on the client
            # side.

            for filter in secret_filter_model.list_of_filters:
                # The AWS Secrets Manager API only supports prefix matching. We
                # take advantage of this to filter as much as possible on the
                # AWS server side and we leave the rest to the client.
                if filter.operation not in [
                    GenericFilterOps.EQUALS,
                    GenericFilterOps.STARTSWITH,
                ]:
                    continue

                # Map the ZenML filter columns onto the keyword arguments
                # understood by `_get_secret_metadata`.
                if filter.column == "id":
                    metadata_args["secret_id"] = UUID(filter.value)
                elif filter.column == "name":
                    metadata_args["secret_name"] = filter.value
                elif filter.column == "scope":
                    metadata_args["scope"] = SecretScope(filter.value)
                elif filter.column == "workspace_id":
                    metadata_args["workspace"] = UUID(filter.value)
                elif filter.column == "user_id":
                    metadata_args["user"] = UUID(filter.value)
                else:
                    # AWS doesn't support filtering on the created/updated
                    # timestamps, so we'll have to do that on the client side.
                    continue

        # The metadata will always contain at least the filter criteria
        # required to exclude everything but AWS secrets that belong to the
        # current ZenML deployment.
        metadata = self._get_secret_metadata(**metadata_args)
        aws_filters = self._get_aws_secret_filters(metadata)

        results: List[SecretResponseModel] = []

        try:
            # AWS Secrets Manager API pagination is wrapped around the
            # `list_secrets` method call. We use it because we need to fetch all
            # secrets matching the (partial) filter that we set up. Note that
            # the pagination used here has nothing to do with the pagination
            # that we do for the method caller.
            paginator = self.client.get_paginator("list_secrets")
            pages = paginator.paginate(
                Filters=aws_filters,
                PaginationConfig={
                    "PageSize": self.config.list_page_size,
                },
            )

            for page in pages:
                for secret in page["SecretList"]:
                    try:
                        # NOTE: we do not include the secret values in the
                        # response. We would need a separate API call to fetch
                        # them for each secret, which would be very inefficient
                        # anyway.
                        secret_model = self._convert_aws_secret(
                            tags=secret["Tags"],
                            created=secret["CreatedDate"],
                            updated=secret["LastChangedDate"],
                        )
                    except KeyError:
                        # The _convert_aws_secret method raises a KeyError
                        # if the secret is tied to a workspace or user that no
                        # longer exists. Here we pretend that the secret does
                        # not exist.
                        continue

                    # Filter again on the client side to cover all filter
                    # operations.
                    if not secret_filter_model.secret_matches(secret_model):
                        continue
                    results.append(secret_model)
        except ClientError as e:
            raise RuntimeError(f"Error listing AWS secrets: {e}")

        # Sort the results
        sorted_results = secret_filter_model.sort_secrets(results)

        # Paginate the results on the client side.
        total = len(sorted_results)
        if total == 0:
            # An empty result set still counts as a single (empty) page.
            total_pages = 1
        else:
            total_pages = math.ceil(total / secret_filter_model.size)

        if secret_filter_model.page > total_pages:
            raise ValueError(
                f"Invalid page {secret_filter_model.page}. The requested page "
                f"size is {secret_filter_model.size} and there are a total of "
                f"{total} items for this query. The maximum page value "
                f"therefore is {total_pages}."
            )

        return Page(
            total=total,
            total_pages=total_pages,
            items=sorted_results[
                (secret_filter_model.page - 1)
                * secret_filter_model.size : secret_filter_model.page
                * secret_filter_model.size
            ],
            index=secret_filter_model.page,
            max_size=secret_filter_model.size,
        )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Apply an update to an existing secret.

        The update is merged into the existing secret: values explicitly set
        to `None` in the update are dropped from the result, values present
        in both are taken from the update, and all remaining values from
        either side are preserved.

        When the update changes the name or scope, the store's scoping rules
        are re-validated:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.

        Raises:
            EntityExistsError: If the update includes a change of name or
                scope and a secret with the same name already exists in the
                same scope.
            RuntimeError: If the AWS Secrets Manager API returns an unexpected
                error.
        """
        secret = self.get_secret(secret_id)

        # The owning user and workspace are immutable; reject any attempt
        # to change them.
        assert secret.user is not None
        self._validate_user_and_workspace_update(
            secret_update=secret_update,
            current_user=secret.user.id,
            current_workspace=secret.workspace.id,
        )

        renamed = secret_update.name is not None
        rescoped = secret_update.scope is not None

        if renamed:
            self._validate_aws_secret_name(secret_update.name)
            secret.name = secret_update.name
        if rescoped:
            secret.scope = secret_update.scope
        if secret_update.values is not None:
            # Merge the update values over the existing ones. Keys mapped to
            # `None` survive this merge but are filtered out later by the
            # `.secret_values` property.
            secret.values.update(secret_update.values)

        if renamed or rescoped:
            # A name or scope change may collide with another secret in the
            # target scope; enforce uniqueness before touching AWS.
            assert secret.user is not None
            secret_exists, msg = self._check_secret_scope(
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace.id,
                user=secret.user.id,
                exclude_secret_id=secret.id,
            )
            if secret_exists:
                raise EntityExistsError(msg)

        aws_secret_id = self._get_aws_secret_id(secret_id)
        secret_value = json.dumps(secret.secret_values)

        # The ZenML secret metadata travels as AWS resource tags.
        tags = self._get_aws_secret_tags(
            self._get_secret_metadata_for_secret(secret)
        )

        try:
            # First call: replace the secret payload.
            self.client.put_secret_value(
                SecretId=aws_secret_id,
                SecretString=secret_value,
            )
            # Second call: refresh the metadata tags.
            self.client.tag_resource(
                SecretId=aws_secret_id,
                Tags=tags,
            )
            # Third call: read back the created/updated timestamps, which
            # only AWS knows after the update.
            describe_secret_response = self.client.describe_secret(
                SecretId=aws_secret_id
            )
        except ClientError as e:
            raise RuntimeError(f"Error updating secret: {e}")

        logger.debug("Updated AWS secret: %s", aws_secret_id)

        self._wait_for_secret_to_propagate(aws_secret_id, tags=tags)

        return SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
            values=secret.secret_values,
            created=describe_secret_response["CreatedDate"],
            updated=describe_secret_response["LastChangedDate"],
        )

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The id of the secret to delete.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the AWS Secrets Manager API returns an unexpected
                error.
        """
        try:
            self.client.delete_secret(
                SecretId=self._get_aws_secret_id(secret_id),
                # Skip the AWS recovery window so the secret is removed
                # immediately instead of lingering in a scheduled-for-deletion
                # state.
                ForceDeleteWithoutRecovery=True,
            )
        except ClientError as e:
            error_code = e.response["Error"]["Code"]
            # A missing secret and one already marked for deletion are both
            # reported to the caller as "not found".
            not_found = error_code == "ResourceNotFoundException" or (
                error_code == "InvalidRequestException"
                and "marked for deletion" in e.response["Error"]["Message"]
            )
            if not_found:
                raise KeyError(f"Secret with ID {secret_id} not found")

            raise RuntimeError(
                f"Error deleting secret with ID {secret_id}: {e}"
            )
client: Any property readonly

Initialize and return the AWS Secrets Manager client.

Returns:

Type Description
Any

The AWS Secrets Manager client.

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

AWS secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

region_name str

The AWS region name to use.

aws_access_key_id Optional[pydantic.types.SecretStr]

The AWS access key ID to use to authenticate.

aws_secret_access_key Optional[pydantic.types.SecretStr]

The AWS secret access key to use to authenticate.

aws_session_token Optional[pydantic.types.SecretStr]

The AWS session token to use to authenticate.

list_page_size int

The number of secrets to fetch per page when listing secrets.

secret_list_refresh_timeout int

The number of seconds to wait after creating or updating an AWS secret until the changes are reflected in the secrets returned by list_secrets. Set this to zero to disable the wait. This may be necessary because it can take some time for new secrets and updated secrets to be reflected in the result returned by list_secrets on the client side. This value should not be set to a large value, because it blocks ZenML server threads while waiting and can cause performance issues. Disable this if you don't need changes to be reflected immediately on the client side.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
class AWSSecretsStoreConfiguration(SecretsStoreConfiguration):
    """AWS secrets store configuration.

    Attributes:
        type: The type of the store.
        region_name: The AWS region name to use.
        aws_access_key_id: The AWS access key ID to use to authenticate.
        aws_secret_access_key: The AWS secret access key to use to
            authenticate.
        aws_session_token: The AWS session token to use to authenticate.
        list_page_size: The number of secrets to fetch per page when
            listing secrets.
        secret_list_refresh_timeout: The number of seconds to wait after
            creating or updating an AWS secret until the changes are reflected
            in the secrets returned by `list_secrets`. Set this to zero to
            disable the wait. This may be necessary because it can take some
            time for new secrets and updated secrets to be reflected in the
            result returned by `list_secrets` on the client side. This value
            should not be set to a large value, because it blocks ZenML server
            threads while waiting and can cause performance issues.
            Disable this if you don't need changes to be reflected immediately
            on the client side.
    """

    # Store type discriminator; fixed to the AWS secrets store type.
    type: SecretsStoreType = SecretsStoreType.AWS
    # AWS region hosting the Secrets Manager instance (required).
    region_name: str
    # Credentials are SecretStr so they are masked in logs/reprs. When left
    # unset, presumably the default AWS credential resolution chain applies
    # at client construction time — TODO confirm against the client property.
    aws_access_key_id: Optional[SecretStr] = None
    aws_secret_access_key: Optional[SecretStr] = None
    aws_session_token: Optional[SecretStr] = None
    # Server-side page size used by the list_secrets paginator.
    list_page_size: int = 100
    # 0 disables the post-write propagation wait entirely.
    secret_list_refresh_timeout: int = 0

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Forbid extra attributes set in the class: unknown keys passed to the
    # model raise a validation error instead of being silently accepted.
    extra = "forbid"
create_secret(*args, **kwargs)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The newly created secret.

Exceptions:

Type Description
EntityExistsError

If a secret with the same name already exists in the same scope.

RuntimeError

If the AWS Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated callable with analytics event tracking; the event
    is emitted by the `event_handler` context manager around the call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE: `event`, `v1`, `v2` and `func` are closure variables supplied
    # by the enclosing decorator (not visible in this snippet).
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the wrapped callable is a method on an analytics-capable
            # object, reuse that object as the tracker.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence over metadata
            # gathered from the arguments above.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The id of the secret to delete.

required

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the AWS Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated callable with analytics event tracking; the event
    is emitted by the `event_handler` context manager around the call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE: `event`, `v1`, `v2` and `func` are closure variables supplied
    # by the enclosing decorator (not visible in this snippet).
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the wrapped callable is a method on an analytics-capable
            # object, reuse that object as the tracker.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence over metadata
            # gathered from the arguments above.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the AWS Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.

    Raises:
        KeyError: If the secret does not exist.
        RuntimeError: If the AWS Secrets Manager API returns an unexpected
            error.
    """
    aws_secret_id = self._get_aws_secret_id(secret_id)

    try:
        get_secret_value_response = self.client.get_secret_value(
            SecretId=aws_secret_id
        )
        # We need a separate AWS API call to get the AWS secret tags which
        # contain the ZenML secret metadata, since the get_secret_value API
        # does not return them.
        describe_secret_response = self.client.describe_secret(
            SecretId=aws_secret_id
        )
    except ClientError as e:
        if e.response["Error"]["Code"] == "ResourceNotFoundException":
            raise KeyError(f"Secret with ID {secret_id} not found")

        if (
            e.response["Error"]["Code"] == "InvalidRequestException"
            and "marked for deletion" in e.response["Error"]["Message"]
        ):
            # A secret pending deletion is reported as "not found", matching
            # the behavior of `delete_secret`.
            raise KeyError(f"Secret with ID {secret_id} not found")

        # Fixed: added the missing ": " separator before the error details so
        # the message matches the format used by the other secret operations
        # (e.g. "Error deleting secret with ID {id}: {e}").
        raise RuntimeError(
            f"Error fetching secret with ID {secret_id}: {e}"
        )

    # The _convert_aws_secret method raises a KeyError if the
    # secret is tied to a workspace or user that no longer exists. Here we
    # simply pass the exception up the stack, as if the secret was not found
    # in the first place, knowing that it will be cascade-deleted soon.
    return self._convert_aws_secret(
        tags=describe_secret_response["Tags"],
        created=describe_secret_response["CreatedDate"],
        updated=describe_secret_response["LastChangedDate"],
        values=get_secret_value_response["SecretString"],
    )
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Exceptions:

Type Description
ValueError

If the filter contains an out-of-bounds page number.

RuntimeError

If the AWS Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Note that returned secrets do not include any secret values. To fetch
    the secret values, use `get_secret`.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all secrets matching the filter criteria, with pagination
        information and sorted according to the filter criteria. The
        returned secrets do not include any secret values, only metadata. To
        fetch the secret values, use `get_secret` individually with each
        secret.

    Raises:
        ValueError: If the filter contains an out-of-bounds page number.
        RuntimeError: If the AWS Secrets Manager API returns an unexpected
            error.
    """
    # The AWS Secrets Manager API does not natively support the entire
    # range of filtering, sorting and pagination options that ZenML
    # supports. The implementation of this method is therefore a bit
    # involved. We try to make use of the AWS filtering API as much as
    # possible to reduce the number of secrets that we need to fetch, then
    # we apply the rest of filtering, sorting and pagination on
    # the client side.

    metadata_args: Dict[str, Any] = {}
    if secret_filter_model.logical_operator == LogicalOperators.AND:
        # We can only filter on the AWS server side if we have an AND
        # logical operator. Otherwise, we need to filter on the client
        # side.

        # NOTE: the loop variable shadows the `filter` builtin, which is
        # not used in this method.
        for filter in secret_filter_model.list_of_filters:
            # The AWS Secrets Manager API only supports prefix matching. We
            # take advantage of this to filter as much as possible on the
            # AWS server side and we leave the rest to the client.
            if filter.operation not in [
                GenericFilterOps.EQUALS,
                GenericFilterOps.STARTSWITH,
            ]:
                continue

            if filter.column == "id":
                metadata_args["secret_id"] = UUID(filter.value)
            elif filter.column == "name":
                metadata_args["secret_name"] = filter.value
            elif filter.column == "scope":
                metadata_args["scope"] = SecretScope(filter.value)
            elif filter.column == "workspace_id":
                metadata_args["workspace"] = UUID(filter.value)
            elif filter.column == "user_id":
                metadata_args["user"] = UUID(filter.value)
            else:
                # AWS doesn't support filtering on the created/updated
                # timestamps, so we'll have to do that on the client side.
                continue

    # The metadata will always contain at least the filter criteria
    # required to exclude everything but AWS secrets that belong to the
    # current ZenML deployment.
    metadata = self._get_secret_metadata(**metadata_args)
    aws_filters = self._get_aws_secret_filters(metadata)

    results: List[SecretResponseModel] = []

    try:
        # AWS Secrets Manager API pagination is wrapped around the
        # `list_secrets` method call. We use it because we need to fetch all
        # secrets matching the (partial) filter that we set up. Note that
        # the pagination used here has nothing to do with the pagination
        # that we do for the method caller.
        paginator = self.client.get_paginator("list_secrets")
        pages = paginator.paginate(
            Filters=aws_filters,
            PaginationConfig={
                "PageSize": self.config.list_page_size,
            },
        )

        for page in pages:
            for secret in page["SecretList"]:
                try:
                    # NOTE: we do not include the secret values in the
                    # response. We would need a separate API call to fetch
                    # them for each secret, which would be very inefficient
                    # anyway.
                    secret_model = self._convert_aws_secret(
                        tags=secret["Tags"],
                        created=secret["CreatedDate"],
                        updated=secret["LastChangedDate"],
                    )
                except KeyError:
                    # The _convert_aws_secret method raises a KeyError
                    # if the secret is tied to a workspace or user that no
                    # longer exists. Here we pretend that the secret does
                    # not exist.
                    continue

                # Filter again on the client side to cover all filter
                # operations.
                if not secret_filter_model.secret_matches(secret_model):
                    continue
                results.append(secret_model)
    except ClientError as e:
        raise RuntimeError(f"Error listing AWS secrets: {e}")

    # Sort the results
    sorted_results = secret_filter_model.sort_secrets(results)

    # Paginate the results
    total = len(sorted_results)
    if total == 0:
        # An empty result set is still reported as a single (empty) page.
        total_pages = 1
    else:
        total_pages = math.ceil(total / secret_filter_model.size)

    # Page numbers are 1-based; reject requests beyond the last page.
    if secret_filter_model.page > total_pages:
        raise ValueError(
            f"Invalid page {secret_filter_model.page}. The requested page "
            f"size is {secret_filter_model.size} and there are a total of "
            f"{total} items for this query. The maximum page value "
            f"therefore is {total_pages}."
        )

    return Page(
        total=total,
        total_pages=total_pages,
        # Convert the 1-based page index into a 0-based slice of the
        # sorted results.
        items=sorted_results[
            (secret_filter_model.page - 1)
            * secret_filter_model.size : secret_filter_model.page
            * secret_filter_model.size
        ],
        index=secret_filter_model.page,
        max_size=secret_filter_model.size,
    )
update_secret(*args, **kwargs)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to be updated.

required
secret_update

The update to be applied.

required

Returns:

Type Description
Any

The updated secret.

Exceptions:

Type Description
EntityExistsError

If the update includes a change of name or scope and a secret with the same name already exists in the same scope.

RuntimeError

If the AWS Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Wraps the decorated callable with analytics event tracking; the event
    is emitted by the `event_handler` context manager around the call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE: `event`, `v1`, `v2` and `func` are closure variables supplied
    # by the enclosing decorator (not visible in this snippet).
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the wrapped callable is a method on an analytics-capable
            # object, reuse that object as the tracker.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence over metadata
            # gathered from the arguments above.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
AWSSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

AWS secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

region_name str

The AWS region name to use.

aws_access_key_id Optional[pydantic.types.SecretStr]

The AWS access key ID to use to authenticate.

aws_secret_access_key Optional[pydantic.types.SecretStr]

The AWS secret access key to use to authenticate.

aws_session_token Optional[pydantic.types.SecretStr]

The AWS session token to use to authenticate.

list_page_size int

The number of secrets to fetch per page when listing secrets.

secret_list_refresh_timeout int

The number of seconds to wait after creating or updating an AWS secret until the changes are reflected in the secrets returned by list_secrets. Set this to zero to disable the wait. This may be necessary because it can take some time for new secrets and updated secrets to be reflected in the result returned by list_secrets on the client side. This value should not be set to a large value, because it blocks ZenML server threads while waiting and can cause performance issues. Disable this if you don't need changes to be reflected immediately on the client side.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
class AWSSecretsStoreConfiguration(SecretsStoreConfiguration):
    """AWS secrets store configuration.

    Attributes:
        type: The type of the store.
        region_name: The AWS region name to use.
        aws_access_key_id: The AWS access key ID to use to authenticate.
        aws_secret_access_key: The AWS secret access key to use to
            authenticate.
        aws_session_token: The AWS session token to use to authenticate.
        list_page_size: The number of secrets to fetch per page when
            listing secrets.
        secret_list_refresh_timeout: The number of seconds to wait after
            creating or updating an AWS secret until the changes are reflected
            in the secrets returned by `list_secrets`. Set this to zero to
            disable the wait. This may be necessary because it can take some
            time for new secrets and updated secrets to be reflected in the
            result returned by `list_secrets` on the client side. This value
            should not be set to a large value, because it blocks ZenML server
            threads while waiting and can cause performance issues.
            Disable this if you don't need changes to be reflected immediately
            on the client side.
    """

    # Store type discriminator; fixed to the AWS secrets store type.
    type: SecretsStoreType = SecretsStoreType.AWS
    # AWS region hosting the Secrets Manager instance (required).
    region_name: str
    # Credentials are SecretStr so they are masked in logs/reprs. When left
    # unset, presumably the default AWS credential resolution chain applies
    # at client construction time — TODO confirm against the client property.
    aws_access_key_id: Optional[SecretStr] = None
    aws_secret_access_key: Optional[SecretStr] = None
    aws_session_token: Optional[SecretStr] = None
    # Server-side page size used by the list_secrets paginator.
    list_page_size: int = 100
    # 0 disables the post-write propagation wait entirely.
    secret_list_refresh_timeout: int = 0

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/aws_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Forbid extra attributes set in the class: unknown keys passed to the
    # model raise a validation error instead of being silently accepted.
    extra = "forbid"

azure_secrets_store

Azure Secrets Store implementation.

AzureSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the Azure Key Vault API.

This secrets store implementation uses the Azure Key Vault API to store secrets. It allows a single Azure Key Vault to be shared with other ZenML deployments as well as other third party users and applications.

Here are some implementation highlights:

  • the name/ID of an Azure secret is derived from the ZenML secret UUID and a zenml prefix in the form zenml-{zenml_secret_uuid}. This clearly identifies a secret as being managed by ZenML in the Azure console.

  • the Secrets Store also makes heavy use of Azure Key Vault secret tags to store all the metadata associated with a ZenML secret (e.g. the secret name, scope, user and workspace) and to filter secrets by these metadata. The zenml tag in particular is used to identify and group all secrets that belong to the same ZenML deployment.

  • all secret key-values configured in a ZenML secret are stored as a single JSON string value in the Azure Key Vault secret value.

  • when a user or workspace is deleted, the secrets associated with it are deleted automatically via registered event handlers.

Known challenges and limitations:

  • every Azure Key Vault secret has one or more versions. Every update to a secret creates a new version. The created_on and updated_on timestamps returned by the Secrets Store API are the timestamps of the latest version of the secret. This means that we need to fetch the first version of the secret to get the created_on timestamp. This is not ideal, as we'd need to fetch all versions for every secret to get the created_on timestamp during a list operation. So instead we manage the created and updated timestamps ourselves and save them as tags in the Azure Key Vault secret.

Attributes:

Name Type Description
config

The configuration of the Azure secrets store.

TYPE

The type of the store.

CONFIG_TYPE

The type of the store configuration.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
class AzureSecretsStore(BaseSecretsStore):
    """Secrets store implementation that uses the Azure Key Vault API.

    This secrets store implementation uses the Azure Key Vault API to
    store secrets. It allows a single Azure Key Vault to be shared with other
    ZenML deployments as well as other third party users and applications.

    Here are some implementation highlights:

    * the name/ID of an Azure secret is derived from the ZenML secret UUID and a
    `zenml` prefix in the form `zenml-{zenml_secret_uuid}`. This clearly
    identifies a secret as being managed by ZenML in the Azure console.

    * the Secrets Store also makes heavy use of Azure Key Vault secret tags to
    store all the metadata associated with a ZenML secret (e.g. the secret name,
    scope, user and workspace) and to filter secrets by these metadata. The
    `zenml` tag in particular is used to identify and group all secrets that
    belong to the same ZenML deployment.

    * all secret key-values configured in a ZenML secret are stored as a single
    JSON string value in the Azure Key Vault secret value.

    * when a user or workspace is deleted, the secrets associated with it are
    deleted automatically via registered event handlers.


    Known challenges and limitations:

    * every Azure Key Vault secret has one or more versions. Every update to a
    secret creates a new version. The created_on and updated_on timestamps
    returned by the Secrets Store API are the timestamps of the latest version
    of the secret. This means that we need to fetch the first version of the
    secret to get the created_on timestamp. This is not ideal, as we'd need to
    fetch all versions for every secret to get the created_on timestamp during
    a list operation. So instead we manage the `created` and `updated`
    timestamps ourselves and save them as tags in the Azure Key Vault secret.

    Attributes:
        config: The configuration of the Azure secrets store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
    """

    config: AzureSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.AZURE
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = AzureSecretsStoreConfiguration

    # Lazily-initialized Azure Key Vault client (see the `client` property).
    _client: Optional[SecretClient] = None

    @property
    def client(self) -> SecretClient:
        """Initialize and return the Azure Key Vault client.

        Returns:
            The Azure Key Vault client.
        """
        if self._client is None:
            azure_logger = logging.getLogger("azure")

            # Suppress the INFO logging level of the Azure SDK if the
            # ZenML logging level is WARNING or lower.
            if logger.level <= logging.WARNING:
                azure_logger.setLevel(logging.WARNING)
            else:
                azure_logger.setLevel(logging.INFO)

            # Initialize the Azure Key Vault client with the
            # credentials from the configuration, if provided.
            vault_url = f"https://{self.config.key_vault_name}.vault.azure.net"
            credential: Union[
                ClientSecretCredential,
                DefaultAzureCredential,
            ]
            if (
                self.config.azure_client_id
                and self.config.azure_tenant_id
                and self.config.azure_client_secret
            ):
                credential = ClientSecretCredential(
                    tenant_id=self.config.azure_tenant_id.get_secret_value(),
                    client_id=self.config.azure_client_id.get_secret_value(),
                    client_secret=self.config.azure_client_secret.get_secret_value(),
                )
            else:
                credential = DefaultAzureCredential()
            self._client = SecretClient(
                vault_url=vault_url, credential=credential
            )
        return self._client

    # ====================================
    # Secrets Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the Azure secrets store."""
        logger.debug("Initializing AzureSecretsStore")

        # Initialize the Azure client early, just to catch any configuration or
        # authentication errors early, before the Secrets Store is used.
        _ = self.client

    # ------
    # Secrets
    # ------

    @staticmethod
    def _validate_azure_secret_name(name: str) -> None:
        """Validate a secret name.

        Azure secret names must contain only alphanumeric characters and the
        character `-`.

        Given that the ZenML secret name is stored as an Azure Key Vault secret
        label, we are also limited by the 256 maximum size limitation that Azure
        imposes on label values.

        Args:
            name: the secret name

        Raises:
            ValueError: if the secret name is invalid
        """
        if not re.fullmatch(r"[0-9a-zA-Z-]+", name):
            raise ValueError(
                f"Invalid secret name or namespace '{name}'. Must contain "
                f"only alphanumeric characters and the character -."
            )

        if len(name) > 256:
            raise ValueError(
                f"Invalid secret name or namespace '{name}'. The length is "
                f"limited to maximum 256 characters."
            )

    @staticmethod
    def _get_azure_secret_id(
        secret_id: UUID,
    ) -> str:
        """Get the Azure secret ID corresponding to a ZenML secret ID.

        The convention used for Azure secret names is to use the ZenML
        secret UUID prefixed with `zenml` as the Azure secret name,
        i.e. `zenml-<secret_uuid>`.

        Args:
            secret_id: The ZenML secret ID.

        Returns:
            The Azure secret name.
        """
        return f"{AZURE_ZENML_SECRET_NAME_PREFIX}-{str(secret_id)}"

    def _convert_azure_secret(
        self,
        tags: Dict[str, str],
        values: Optional[str] = None,
    ) -> SecretResponseModel:
        """Create a ZenML secret model from data stored in an Azure secret.

        If the Azure secret cannot be converted, the method acts as if the
        secret does not exist and raises a KeyError.

        Args:
            tags: The Azure secret tags.
            values: The Azure secret values encoded as a JSON string
                (optional).

        Returns:
            The ZenML secret.

        Raises:
            KeyError: if the Azure secret cannot be converted.
        """
        try:
            created = datetime.fromisoformat(
                tags[ZENML_AZURE_SECRET_CREATED_KEY],
            )
            updated = datetime.fromisoformat(
                tags[ZENML_AZURE_SECRET_UPDATED_KEY],
            )
        except KeyError as e:
            raise KeyError(
                f"Secret could not be retrieved: missing required metadata: {e}"
            )

        return self._create_secret_from_metadata(
            metadata=tags,
            created=created,
            updated=updated,
            values=json.loads(values) if values else None,
        )

    @track(AnalyticsEvent.CREATED_SECRET, v2=True)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.

        Raises:
            EntityExistsError: If a secret with the same name already exists
                in the same scope.
            RuntimeError: If the Azure Key Vault API returns an unexpected
                error.
        """
        self._validate_azure_secret_name(secret.name)
        user, workspace = self._validate_user_and_workspace(
            secret.user, secret.workspace
        )

        # Check if a secret with the same name already exists in the same
        # scope.
        secret_exists, msg = self._check_secret_scope(
            secret_name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
        )
        if secret_exists:
            raise EntityExistsError(msg)

        # Generate a new UUID for the secret
        secret_id = uuid.uuid4()
        azure_secret_id = self._get_azure_secret_id(secret_id)
        secret_value = json.dumps(secret.secret_values)

        # Use the ZenML secret metadata as Azure tags
        metadata = self._get_secret_metadata_for_secret(
            secret, secret_id=secret_id
        )

        # We manage the created and updated times ourselves, so we don't need to
        # rely on the Azure Key Vault API to set them.
        created = datetime.utcnow()
        metadata[ZENML_AZURE_SECRET_CREATED_KEY] = created.isoformat()
        metadata[ZENML_AZURE_SECRET_UPDATED_KEY] = created.isoformat()

        try:
            self.client.set_secret(
                azure_secret_id,
                secret_value,
                tags=metadata,
                content_type="application/json",
            )
        except HttpResponseError as e:
            raise RuntimeError(f"Error creating secret: {e}")

        logger.debug("Created Azure secret: %s", azure_secret_id)

        secret_model = SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=workspace,
            user=user,
            values=secret.secret_values,
            created=created,
            updated=created,
        )

        return secret_model

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the Azure Key Vault API returns an unexpected
                error.
        """
        azure_secret_id = self._get_azure_secret_id(secret_id)

        try:
            azure_secret = self.client.get_secret(
                azure_secret_id,
            )
        except ResourceNotFoundError:
            raise KeyError(f"Secret with ID {secret_id} not found")
        except HttpResponseError as e:
            # Keep the message format consistent with delete_secret.
            raise RuntimeError(
                f"Error fetching secret with ID {secret_id}: {e}"
            )

        # The _convert_azure_secret method raises a KeyError if the
        # secret is tied to a workspace or user that no longer exists. Here we
        # simply pass the exception up the stack, as if the secret was not found
        # in the first place, knowing that it will be cascade-deleted soon.
        assert azure_secret.properties.tags is not None
        return self._convert_azure_secret(
            tags=azure_secret.properties.tags,
            values=azure_secret.value,
        )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.

        Raises:
            ValueError: If the filter contains an out-of-bounds page number.
            RuntimeError: If the Azure Key Vault API returns an unexpected
                error.
        """
        # The Azure Key Vault API does not natively support any of the
        # filtering, sorting or pagination options that ZenML supports. The
        # implementation of this method therefore has to fetch all secrets from
        # the Key Vault, then apply the filtering, sorting and pagination on
        # the client side.

        # The metadata will always contain at least the filter criteria
        # required to exclude everything but Azure secrets that belong to the
        # current ZenML deployment.
        results: List[SecretResponseModel] = []

        try:
            all_secrets = self.client.list_properties_of_secrets()
            for secret_property in all_secrets:
                try:
                    # NOTE: we do not include the secret values in the
                    # response. We would need a separate API call to fetch
                    # them for each secret, which would be very inefficient
                    # anyway.
                    assert secret_property.tags is not None
                    secret_model = self._convert_azure_secret(
                        tags=secret_property.tags,
                    )
                except KeyError:
                    # The _convert_azure_secret method raises a KeyError
                    # if the secret is tied to a workspace or user that no
                    # longer exists or if it is otherwise not valid. Here we
                    # pretend that the secret does not exist.
                    continue

                # Filter the secret on the client side.
                if not secret_filter_model.secret_matches(secret_model):
                    continue
                results.append(secret_model)
        except HttpResponseError as e:
            raise RuntimeError(f"Error listing Azure Key Vault secrets: {e}")

        # Sort the results
        sorted_results = secret_filter_model.sort_secrets(results)

        # Paginate the results
        total = len(sorted_results)
        if total == 0:
            total_pages = 1
        else:
            total_pages = math.ceil(total / secret_filter_model.size)

        if secret_filter_model.page > total_pages:
            raise ValueError(
                f"Invalid page {secret_filter_model.page}. The requested page "
                f"size is {secret_filter_model.size} and there are a total of "
                f"{total} items for this query. The maximum page value "
                f"therefore is {total_pages}."
            )

        return Page(
            total=total,
            total_pages=total_pages,
            items=sorted_results[
                (secret_filter_model.page - 1)
                * secret_filter_model.size : secret_filter_model.page
                * secret_filter_model.size
            ],
            index=secret_filter_model.page,
            max_size=secret_filter_model.size,
        )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Updates a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.

        Raises:
            EntityExistsError: If the update includes a change of name or
                scope and a secret with the same name already exists in the
                same scope.
            RuntimeError: If the Azure Key Vault API returns an unexpected
                error.
        """
        secret = self.get_secret(secret_id)

        # Prevent changes to the secret's user or workspace
        assert secret.user is not None
        self._validate_user_and_workspace_update(
            secret_update=secret_update,
            current_user=secret.user.id,
            current_workspace=secret.workspace.id,
        )

        if secret_update.name is not None:
            self._validate_azure_secret_name(secret_update.name)
            secret.name = secret_update.name
        if secret_update.scope is not None:
            secret.scope = secret_update.scope
        if secret_update.values is not None:
            # Merge the existing values with the update values.
            # The values that are set to `None` in the update are removed from
            # the existing secret when we call `.secret_values` later.
            secret.values.update(secret_update.values)

        if secret_update.name is not None or secret_update.scope is not None:
            # Check if a secret with the same name already exists in the same
            # scope.
            assert secret.user is not None
            secret_exists, msg = self._check_secret_scope(
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace.id,
                user=secret.user.id,
                exclude_secret_id=secret.id,
            )
            if secret_exists:
                raise EntityExistsError(msg)

        azure_secret_id = self._get_azure_secret_id(secret_id)
        secret_value = json.dumps(secret.secret_values)

        # Convert the ZenML secret metadata to Azure tags
        metadata = self._get_secret_metadata_for_secret(secret)

        # We manage the created and updated times ourselves, so we don't need to
        # rely on the Azure Key Vault API to set them.
        updated = datetime.utcnow()
        metadata[ZENML_AZURE_SECRET_CREATED_KEY] = secret.created.isoformat()
        metadata[ZENML_AZURE_SECRET_UPDATED_KEY] = updated.isoformat()

        try:
            self.client.set_secret(
                azure_secret_id,
                secret_value,
                tags=metadata,
                content_type="application/json",
            )
        except HttpResponseError as e:
            raise RuntimeError(f"Error updating secret {secret_id}: {e}")

        logger.debug("Updated Azure secret: %s", azure_secret_id)

        secret_model = SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
            values=secret.secret_values,
            created=secret.created,
            updated=updated,
        )

        return secret_model

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The id of the secret to delete.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the Azure Key Vault API returns an unexpected
                error.
        """
        try:
            self.client.begin_delete_secret(
                self._get_azure_secret_id(secret_id),
            ).wait()
        except ResourceNotFoundError:
            raise KeyError(f"Secret with ID {secret_id} not found")
        except HttpResponseError as e:
            raise RuntimeError(
                f"Error deleting secret with ID {secret_id}: {e}"
            )
client: SecretClient property readonly

Initialize and return the Azure Key Vault client.

Returns:

Type Description
SecretClient

The Azure Key Vault client.

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

Azure secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

key_vault_name str

Name of the Azure Key Vault that this secrets store will use to store secrets.

azure_client_id Optional[pydantic.types.SecretStr]

The client ID of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

azure_client_secret Optional[pydantic.types.SecretStr]

The client secret of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

azure_tenant_id Optional[pydantic.types.SecretStr]

The tenant ID of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
class AzureSecretsStoreConfiguration(SecretsStoreConfiguration):
    """Azure secrets store configuration.

    Attributes:
        type: The type of the store.
        key_vault_name: Name of the Azure Key Vault that this secrets store
            will use to store secrets.
        azure_client_id: The client ID of the Azure application service
            principal that will be used to access the Azure Key Vault. If not
            set, the default Azure credential chain will be used.
        azure_client_secret: The client secret of the Azure application
            service principal that will be used to access the Azure Key Vault.
            If not set, the default Azure credential chain will be used.
        azure_tenant_id: The tenant ID of the Azure application service
            principal that will be used to access the Azure Key Vault. If not
            set, the default Azure credential chain will be used.
    """

    # Discriminator used to select this store implementation.
    type: SecretsStoreType = SecretsStoreType.AZURE

    # Key Vault name only; the vault URL is derived from it elsewhere.
    key_vault_name: str
    # Explicit service-principal credentials. All three must be set together;
    # otherwise the default Azure credential chain is used instead.
    azure_client_id: Optional[SecretStr] = None
    azure_client_secret: Optional[SecretStr] = None
    azure_tenant_id: Optional[SecretStr] = None

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Forbid extra attributes so configuration typos fail fast at
    # validation time instead of being silently ignored.
    extra = "forbid"
create_secret(*args, **kwargs)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The newly created secret.

Exceptions:

Type Description
EntityExistsError

If a secret with the same name already exists in the same scope.

RuntimeError

If the Azure Key Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event_handler`, `event`, `v1`, `v2` and `func` are
    # closed over from the enclosing decorator scope, not visible here.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the wrapped callable is a bound method of an analytics
            # tracker (e.g. a store), use that object as the tracker.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pull analytics metadata from the first trackable argument.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence, if trackable.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The id of the secret to delete.

required

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the Azure Key Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `event_handler`, `event`, `v1`, `v2` and `func` are
    # closed over from the enclosing decorator scope, not visible here.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # If the wrapped callable is a bound method of an analytics
            # tracker (e.g. a store), use that object as the tracker.
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pull analytics metadata from the first trackable argument.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence, if trackable.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the Azure Key Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.

    Raises:
        KeyError: If the secret does not exist.
        RuntimeError: If the Azure Key Vault API returns an unexpected
            error.
    """
    azure_secret_id = self._get_azure_secret_id(secret_id)

    try:
        azure_secret = self.client.get_secret(
            azure_secret_id,
        )
    except ResourceNotFoundError:
        raise KeyError(f"Secret with ID {secret_id} not found")
    except HttpResponseError as e:
        # Keep the message format consistent with delete_secret.
        raise RuntimeError(
            f"Error fetching secret with ID {secret_id}: {e}"
        )

    # The _convert_azure_secret method raises a KeyError if the
    # secret is tied to a workspace or user that no longer exists. Here we
    # simply pass the exception up the stack, as if the secret was not found
    # in the first place, knowing that it will be cascade-deleted soon.
    assert azure_secret.properties.tags is not None
    return self._convert_azure_secret(
        tags=azure_secret.properties.tags,
        values=azure_secret.value,
    )
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Exceptions:

Type Description
ValueError

If the filter contains an out-of-bounds page number.

RuntimeError

If the Azure Key Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    The returned secrets carry metadata only, never the secret values.
    Use `get_secret` on each individual secret to retrieve its values.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A sorted, paginated page of all secrets matching the filter
        criteria. Only metadata is included, no secret values.

    Raises:
        ValueError: If the filter contains an out-of-bounds page number.
        RuntimeError: If the Azure Key Vault API returns an unexpected
            error.
    """
    # Azure Key Vault has no server-side filtering, sorting or pagination
    # matching what ZenML needs, so every secret's properties are fetched
    # and all of that work happens client-side. The tags always include at
    # least enough metadata to restrict results to secrets belonging to
    # the current ZenML deployment.
    matching: List[SecretResponseModel] = []

    try:
        for props in self.client.list_properties_of_secrets():
            # NOTE: secret values are deliberately omitted — retrieving
            # them would cost one extra API call per secret.
            assert props.tags is not None
            try:
                candidate = self._convert_azure_secret(tags=props.tags)
            except KeyError:
                # A KeyError from _convert_azure_secret means the secret
                # belongs to a deleted workspace/user or is otherwise
                # invalid; treat it as nonexistent.
                continue

            # Apply the filter criteria client-side.
            if secret_filter_model.secret_matches(candidate):
                matching.append(candidate)
    except HttpResponseError as e:
        raise RuntimeError(f"Error listing Azure Key Vault secrets: {e}")

    # Client-side sorting.
    ordered = secret_filter_model.sort_secrets(matching)

    # Client-side pagination; an empty result set still counts as a
    # single (empty) page.
    total = len(ordered)
    total_pages = (
        math.ceil(total / secret_filter_model.size) if total else 1
    )

    if secret_filter_model.page > total_pages:
        raise ValueError(
            f"Invalid page {secret_filter_model.page}. The requested page "
            f"size is {secret_filter_model.size} and there are a total of "
            f"{total} items for this query. The maximum page value "
            f"therefore is {total_pages}."
        )

    start = (secret_filter_model.page - 1) * secret_filter_model.size
    stop = start + secret_filter_model.size
    return Page(
        total=total,
        total_pages=total_pages,
        items=ordered[start:stop],
        index=secret_filter_model.page,
        max_size=secret_filter_model.size,
    )
update_secret(*args, **kwargs)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to be updated.

required
secret_update

The update to be applied.

required

Returns:

Type Description
Any

The updated secret.

Exceptions:

Type Description
EntityExistsError

If the update includes a change of name or scope and a secret with the same name already exists in the same scope.

RuntimeError

If the Azure Key Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Forwards the call to the wrapped function while collecting analytics
    metadata from the arguments and the result on a best-effort basis.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection must never break the wrapped call, so any
        # failure in it is logged at debug level and otherwise ignored.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # First trackable argument (positional before keyword) wins.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence over the arguments.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
AzureSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

Azure secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

key_vault_name str

Name of the Azure Key Vault that this secrets store will use to store secrets.

azure_client_id Optional[pydantic.types.SecretStr]

The client ID of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

azure_client_secret Optional[pydantic.types.SecretStr]

The client secret of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

azure_tenant_id Optional[pydantic.types.SecretStr]

The tenant ID of the Azure application service principal that will be used to access the Azure Key Vault. If not set, the default Azure credential chain will be used.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
class AzureSecretsStoreConfiguration(SecretsStoreConfiguration):
    """Azure secrets store configuration.

    Attributes:
        type: The type of the store.
        key_vault_name: Name of the Azure Key Vault that this secrets store
            will use to store secrets.
        azure_client_id: The client ID of the Azure application service
            principal that will be used to access the Azure Key Vault. If not
            set, the default Azure credential chain will be used.
        azure_client_secret: The client secret of the Azure application
            service principal that will be used to access the Azure Key Vault.
            If not set, the default Azure credential chain will be used.
        azure_tenant_id: The tenant ID of the Azure application service
            principal that will be used to access the Azure Key Vault. If not
            set, the default Azure credential chain will be used.
    """

    type: SecretsStoreType = SecretsStoreType.AZURE

    key_vault_name: str
    # Credentials are SecretStr so their values stay masked in reprs and
    # serialized output; all optional, falling back to the default
    # Azure credential chain when unset.
    azure_client_id: Optional[SecretStr] = None
    azure_client_secret: Optional[SecretStr] = None
    azure_tenant_id: Optional[SecretStr] = None

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/azure_secrets_store.py
class Config:
    """Pydantic configuration class.

    Rejects unknown configuration attributes instead of silently
    accepting them, so configuration typos fail fast.
    """

    # Forbid extra attributes set in the class.
    extra = "forbid"

base_secrets_store

Base Secrets Store implementation.

BaseSecretsStore (BaseModel, SecretsStoreInterface, AnalyticsTrackerMixin, ABC) pydantic-model

Base class for accessing and persisting ZenML secret objects.

Attributes:

Name Type Description
config

The configuration of the secret store.

track_analytics

Only send analytics if set to True.

_zen_store

The ZenML store that owns this secrets store.

Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
class BaseSecretsStore(
    BaseModel, SecretsStoreInterface, AnalyticsTrackerMixin, ABC
):
    """Base class for accessing and persisting ZenML secret objects.

    Attributes:
        config: The configuration of the secret store.
        track_analytics: Only send analytics if set to `True`.
        _zen_store: The ZenML store that owns this secrets store.
    """

    config: SecretsStoreConfiguration
    track_analytics: bool = True
    # Private (leading underscore): set in __init__, mutable, and excluded
    # from serialization (see the Config class below).
    _zen_store: Optional["BaseZenStore"] = None

    TYPE: ClassVar[SecretsStoreType]
    CONFIG_TYPE: ClassVar[Type[SecretsStoreConfiguration]]

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    def __init__(
        self,
        zen_store: "BaseZenStore",
        **kwargs: Any,
    ) -> None:
        """Create and initialize a secrets store.

        Args:
            zen_store: The ZenML store that owns this secrets store.
            **kwargs: Additional keyword arguments to pass to the Pydantic
                constructor.

        Raises:
            RuntimeError: If the store cannot be initialized.
        """
        super().__init__(**kwargs)
        self._zen_store = zen_store

        # Subscribe to store events so that secrets owned by a workspace or
        # user are cleaned up when that workspace or user is deleted.
        self.zen_store.register_event_handler(
            StoreEvent.WORKSPACE_DELETED, self._on_workspace_deleted
        )

        self.zen_store.register_event_handler(
            StoreEvent.USER_DELETED, self._on_user_deleted
        )

        try:
            self._initialize()
        except Exception as e:
            # Wrap any backend-specific initialization error into a uniform
            # RuntimeError, keeping the original exception chained.
            raise RuntimeError(
                f"Error initializing {self.type.value} secrets store: {str(e)}"
            ) from e

    @staticmethod
    def _load_custom_store_class(
        store_config: SecretsStoreConfiguration,
    ) -> Type["BaseSecretsStore"]:
        """Loads the custom secrets store class from the given config.

        Args:
            store_config: The configuration of the secrets store.

        Returns:
            The secrets store class corresponding to the configured custom
            secrets store.

        Raises:
            ValueError: If the configured class path cannot be imported or is
                not a subclass of `BaseSecretsStore`.
        """
        # Ensured through Pydantic root validation
        assert store_config.class_path is not None

        # Import the class dynamically
        try:
            store_class = source_utils.load_and_validate_class(
                store_config.class_path, expected_class=BaseSecretsStore
            )
        except (ImportError, AttributeError) as e:
            raise ValueError(
                f"Could not import class `{store_config.class_path}`: {str(e)}"
            ) from e

        return store_class

    @staticmethod
    def get_store_class(
        store_config: SecretsStoreConfiguration,
    ) -> Type["BaseSecretsStore"]:
        """Returns the class of the given secrets store type.

        Args:
            store_config: The configuration of the secrets store.

        Returns:
            The secrets store class corresponding to the configured secrets
            store type.

        Raises:
            TypeError: If the secrets store type is unsupported.
        """
        # Built-in store implementations are imported lazily so that optional
        # provider dependencies are only loaded when actually needed.
        if store_config.type == SecretsStoreType.SQL:
            from zenml.zen_stores.secrets_stores.sql_secrets_store import (
                SqlSecretsStore,
            )

            return SqlSecretsStore

        if store_config.type == SecretsStoreType.REST:
            from zenml.zen_stores.secrets_stores.rest_secrets_store import (
                RestSecretsStore,
            )

            return RestSecretsStore

        if store_config.type == SecretsStoreType.AWS:
            from zenml.zen_stores.secrets_stores.aws_secrets_store import (
                AWSSecretsStore,
            )

            return AWSSecretsStore
        elif store_config.type == SecretsStoreType.GCP:
            from zenml.zen_stores.secrets_stores.gcp_secrets_store import (
                GCPSecretsStore,
            )

            return GCPSecretsStore
        elif store_config.type == SecretsStoreType.AZURE:
            from zenml.zen_stores.secrets_stores.azure_secrets_store import (
                AzureSecretsStore,
            )

            return AzureSecretsStore
        elif store_config.type == SecretsStoreType.HASHICORP:
            from zenml.zen_stores.secrets_stores.hashicorp_secrets_store import (
                HashiCorpVaultSecretsStore,
            )

            return HashiCorpVaultSecretsStore
        elif store_config.type != SecretsStoreType.CUSTOM:
            # Any remaining non-custom type is unknown to this version.
            raise TypeError(
                f"No store implementation found for secrets store type "
                f"`{store_config.type.value}`."
            )

        return BaseSecretsStore._load_custom_store_class(store_config)

    @staticmethod
    def create_store(
        config: SecretsStoreConfiguration,
        **kwargs: Any,
    ) -> "BaseSecretsStore":
        """Create and initialize a secrets store from a secrets store configuration.

        Args:
            config: The secrets store configuration to use.
            **kwargs: Additional keyword arguments to pass to the store class

        Returns:
            The initialized secrets store.
        """
        logger.debug(
            f"Creating secrets store with type '{config.type.value}'..."
        )
        store_class = BaseSecretsStore.get_store_class(config)
        store = store_class(
            config=config,
            **kwargs,
        )
        return store

    @property
    def type(self) -> SecretsStoreType:
        """The type of the secrets store.

        Returns:
            The type of the secrets store.
        """
        return self.TYPE

    @property
    def zen_store(self) -> "BaseZenStore":
        """The ZenML store that owns this secrets store.

        Returns:
            The ZenML store that owns this secrets store.

        Raises:
            ValueError: If the store is not initialized.
        """
        if not self._zen_store:
            raise ValueError("Store not initialized")
        return self._zen_store

    # --------------------
    # Store Event Handlers
    # --------------------

    def _on_workspace_deleted(
        self, event: StoreEvent, workspace_id: UUID
    ) -> None:
        """Handle the deletion of a workspace.

        This method deletes all secrets associated with the given workspace.

        Args:
            event: The store event.
            workspace_id: The ID of the workspace that was deleted.
        """
        logger.debug(
            "Handling workspace deletion event for workspace %s", workspace_id
        )

        # Delete all secrets associated with the workspace.
        secrets = depaginate(
            partial(
                self.list_secrets,
                secret_filter_model=SecretFilterModel(
                    workspace_id=workspace_id
                ),
            )
        )
        for secret in secrets:
            try:
                self.delete_secret(secret.id)
            except KeyError:
                # The secret is already gone (e.g. deleted concurrently);
                # nothing left to clean up.
                pass
            except Exception as e:
                logger.warning("Failed to delete secret %s: %s", secret.id, e)

    def _on_user_deleted(self, event: StoreEvent, user_id: UUID) -> None:
        """Handle the deletion of a user.

        This method deletes all secrets associated with the given user.

        Args:
            event: The store event.
            user_id: The ID of the user that was deleted.
        """
        logger.debug("Handling user deletion event for user %s", user_id)

        # Delete all secrets associated with the user.
        secrets = depaginate(
            partial(
                self.list_secrets,
                secret_filter_model=SecretFilterModel(user_id=user_id),
            )
        )
        for secret in secrets:
            try:
                self.delete_secret(secret.id)
            except KeyError:
                # The secret is already gone (e.g. deleted concurrently);
                # nothing left to clean up.
                pass
            except Exception as e:
                logger.warning("Failed to delete secret %s: %s", secret.id, e)

    # ------------------------------------------
    # Common helpers for Secrets Store back-ends
    # ------------------------------------------

    def _validate_user_and_workspace(
        self, user_id: UUID, workspace_id: UUID
    ) -> Tuple[UserResponseModel, WorkspaceResponseModel]:
        """Validates that the given user and workspace IDs are valid.

        This method calls the ZenML store to validate the user and workspace
        IDs. It raises a KeyError exception if either the user or workspace
        does not exist.

        Args:
            user_id: The ID of the user to validate.
            workspace_id: The ID of the workspace to validate.

        Returns:
            The user and workspace.

        Raises:
            KeyError: If the user or workspace does not exist (propagated
                from the ZenML store lookups).
        """
        user = self.zen_store.get_user(user_id)
        workspace = self.zen_store.get_workspace(workspace_id)

        return user, workspace

    def _validate_user_and_workspace_update(
        self,
        secret_update: SecretUpdateModel,
        current_user: UUID,
        current_workspace: UUID,
    ) -> None:
        """Validates that a secret update does not change the user or workspace.

        Args:
            secret_update: Secret update.
            current_user: The current user ID.
            current_workspace: The current workspace ID.

        Raises:
            IllegalOperationError: If the user or workspace is changed.
        """
        if secret_update.user and current_user != secret_update.user:
            raise IllegalOperationError("Cannot change the user of a secret.")
        if (
            secret_update.workspace
            and current_workspace != secret_update.workspace
        ):
            raise IllegalOperationError(
                "Cannot change the workspace of a secret."
            )

    def _check_secret_scope(
        self,
        secret_name: str,
        scope: SecretScope,
        workspace: UUID,
        user: UUID,
        exclude_secret_id: Optional[UUID] = None,
    ) -> Tuple[bool, str]:
        """Checks if a secret with the given name already exists in the given scope.

        This method enforces the following scope rules:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_name: The name of the secret.
            scope: The scope of the secret.
            workspace: The ID of the workspace to which the secret belongs.
            user: The ID of the user to which the secret belongs.
            exclude_secret_id: The ID of a secret to exclude from the check
                (used e.g. during an update to exclude the existing secret).

        Returns:
            True if a secret with the given name already exists in the given
            scope, False otherwise, and an error message.
        """
        filter = SecretFilterModel(
            name=secret_name,
            scope=scope,
            page=1,
            size=2,  # We only need to know if there is more than one secret
        )

        # Narrow the query to the workspace (and user, for user scope) that
        # the scope rules apply to.
        if scope in [SecretScope.WORKSPACE, SecretScope.USER]:
            filter.workspace_id = workspace
        if scope == SecretScope.USER:
            filter.user_id = user

        existing_secrets = self.list_secrets(secret_filter_model=filter).items
        if exclude_secret_id is not None:
            existing_secrets = [
                s for s in existing_secrets if s.id != exclude_secret_id
            ]

        if existing_secrets:
            existing_secret_model = existing_secrets[0]

            # Build a scope-specific error message for the caller to use.
            msg = (
                f"Found an existing {scope.value} scoped secret with the "
                f"same '{secret_name}' name"
            )
            if scope in [SecretScope.WORKSPACE, SecretScope.USER]:
                msg += (
                    f" in the same '{existing_secret_model.workspace.name}' "
                    f"workspace"
                )
            if scope == SecretScope.USER:
                assert existing_secret_model.user
                msg += (
                    f" for the same '{existing_secret_model.user.name}' user"
                )

            return True, msg

        return False, ""

    # --------------------------------------------------------
    # Helpers for Secrets Store back-ends that use tags/labels
    # --------------------------------------------------------

    def _get_secret_metadata(
        self,
        secret_id: Optional[UUID] = None,
        secret_name: Optional[str] = None,
        scope: Optional[SecretScope] = None,
        workspace: Optional[UUID] = None,
        user: Optional[UUID] = None,
    ) -> Dict[str, str]:
        """Get a dictionary with metadata that can be used as tags/labels.

        This utility method can be used with Secrets Managers that can
        associate metadata (e.g. tags, labels) with a secret. The metadata can
        be configured alongside each secret and then used as a filter criteria
        when running queries against the backend e.g. to retrieve all the
        secrets within a given scope or to retrieve all secrets with a given
        name within a given scope.

        NOTE: the ZENML_SECRET_LABEL is always included in the metadata to
        distinguish ZenML secrets from other secrets that might be stored in
        the same backend, as well as to distinguish between different ZenML
        deployments using the same backend. Its value is set to the ZenML
        deployment ID and it should be included in all queries to the backend.

        Args:
            secret_id: Optional secret ID to include in the metadata.
            secret_name: Optional secret name to include in the metadata.
            scope: Optional scope to include in the metadata.
            workspace: Optional workspace ID to include in the metadata.
            user: Optional user ID to include in the scope metadata.

        Returns:
            Dictionary with secret metadata information.
        """
        # Always include the main ZenML label to distinguish ZenML secrets
        # from other secrets that might be stored in the same backend and
        # to distinguish between different ZenML deployments using the same
        # backend.
        metadata: Dict[str, str] = {
            ZENML_SECRET_LABEL: str(self.zen_store.get_store_info().id)
        }

        if secret_id:
            metadata[ZENML_SECRET_ID_LABEL] = str(secret_id)
        if secret_name:
            metadata[ZENML_SECRET_NAME_LABEL] = secret_name
        if scope:
            metadata[ZENML_SECRET_SCOPE_LABEL] = scope.value
        if workspace:
            metadata[ZENML_SECRET_WORKSPACE_LABEL] = str(workspace)
        if user:
            metadata[ZENML_SECRET_USER_LABEL] = str(user)

        return metadata

    def _get_secret_metadata_for_secret(
        self,
        secret: Union[SecretRequestModel, SecretResponseModel],
        secret_id: Optional[UUID] = None,
    ) -> Dict[str, str]:
        """Get a dictionary with the secrets metadata describing a secret.

        This utility method can be used with Secrets Managers that can
        associate metadata (e.g. tags, labels) with a secret. The metadata can
        be configured alongside each secret and then used as a filter criteria
        when running queries against the backend.

        Args:
            secret: The secret to get the metadata for.
            secret_id: Optional secret ID to include in the metadata (if not
                already included in the secret).

        Returns:
            Dictionary with secret metadata information.
        """
        # Request models reference the workspace/user by raw ID, while
        # response models embed the full workspace/user models.
        if isinstance(secret, SecretRequestModel):
            return self._get_secret_metadata(
                secret_id=secret_id,
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace,
                user=secret.user,
            )

        return self._get_secret_metadata(
            secret_id=secret.id,
            secret_name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace.id,
            user=secret.user.id if secret.user else None,
        )

    def _create_secret_from_metadata(
        self,
        metadata: Dict[str, str],
        created: datetime,
        updated: datetime,
        values: Optional[Dict[str, str]] = None,
    ) -> SecretResponseModel:
        """Create a ZenML secret model from metadata stored in the secrets store backend.

        Args:
            metadata: ZenML secret metadata collected from the backend secret
                (e.g. from secret tags/labels).
            created: The secret creation time.
            updated: The secret last updated time.
            values: The secret values (optional).

        Returns:
            The ZenML secret.

        Raises:
            KeyError: If the secret does not have the required metadata, if it
                is not managed by this ZenML instance or if it is linked to a
                user or workspace that no longer exists.
        """
        # Double-check that the secret is managed by this ZenML instance.
        if metadata.get(ZENML_SECRET_LABEL) != str(
            self.zen_store.get_store_info().id
        ):
            raise KeyError("Secret is not managed by this ZenML instance")

        # Recover the ZenML secret fields from the input secret metadata.
        try:
            secret_id = UUID(metadata[ZENML_SECRET_ID_LABEL])
            name = metadata[ZENML_SECRET_NAME_LABEL]
            scope = SecretScope(metadata[ZENML_SECRET_SCOPE_LABEL])
            workspace_id = UUID(metadata[ZENML_SECRET_WORKSPACE_LABEL])
            user_id = UUID(metadata[ZENML_SECRET_USER_LABEL])
        except KeyError as e:
            raise KeyError(
                f"Secret could not be retrieved: missing required metadata: {e}"
            )

        try:
            user, workspace = self._validate_user_and_workspace(
                user_id, workspace_id
            )
        except KeyError as e:
            # The user or workspace associated with the secret no longer
            # exists. This can happen if the user or workspace is being
            # deleted nearly at the same time as this call. In this case, we
            # raise a KeyError exception. The caller should handle this
            # exception by assuming that the secret no longer exists.
            logger.warning(
                f"Secret with ID {secret_id} is associated with a "
                f"non-existent user or workspace. Silently ignoring the "
                f"secret: {e}"
            )
            raise KeyError(
                f"Secret with ID {secret_id} could not be retrieved: "
                f"the secret is associated with a non-existent user or "
                f"workspace: {e}"
            )

        secret_model = SecretResponseModel(
            id=secret_id,
            name=name,
            scope=scope,
            workspace=workspace,
            user=user,
            values=values if values else {},
            created=created,
            updated=updated,
        )

        return secret_model

    # ---------
    # Analytics
    # ---------

    def track_event(
        self,
        event: AnalyticsEvent,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Track an analytics event.

        Args:
            event: The event to track.
            metadata: Additional metadata to track with the event.
        """
        if self.track_analytics:
            # Server information is always tracked, if available.
            track_event(event, metadata)

    class Config:
        """Pydantic configuration class."""

        # Validate attributes when assigning them. We need to set this in order
        # to have a mix of mutable and immutable attributes
        validate_assignment = True
        # Ignore extra attributes from configs of previous ZenML versions
        extra = "ignore"
        # all attributes with leading underscore are private and therefore
        # are mutable and not included in serialization
        underscore_attrs_are_private = True
type: SecretsStoreType property readonly

The type of the secrets store.

Returns:

Type Description
SecretsStoreType

The type of the secrets store.

zen_store: BaseZenStore property readonly

The ZenML store that owns this secrets store.

Returns:

Type Description
BaseZenStore

The ZenML store that owns this secrets store.

Exceptions:

Type Description
ValueError

If the store is not initialized.

Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
class Config:
    """Pydantic configuration class.

    Controls validation and serialization behavior of the secrets store
    model.
    """

    # Validate attributes when assigning them. We need to set this in order
    # to have a mix of mutable and immutable attributes
    validate_assignment = True
    # Ignore extra attributes from configs of previous ZenML versions
    extra = "ignore"
    # all attributes with leading underscore are private and therefore
    # are mutable and not included in serialization
    underscore_attrs_are_private = True
__init__(self, zen_store, **kwargs) special

Create and initialize a secrets store.

Parameters:

Name Type Description Default
zen_store BaseZenStore

The ZenML store that owns this secrets store.

required
**kwargs Any

Additional keyword arguments to pass to the Pydantic constructor.

{}

Exceptions:

Type Description
RuntimeError

If the store cannot be initialized.

Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
def __init__(
    self,
    zen_store: "BaseZenStore",
    **kwargs: Any,
) -> None:
    """Create and initialize a secrets store.

    Args:
        zen_store: The ZenML store that owns this secrets store.
        **kwargs: Additional keyword arguments to pass to the Pydantic
            constructor.

    Raises:
        RuntimeError: If the store cannot be initialized.
    """
    super().__init__(**kwargs)
    self._zen_store = zen_store

    # Subscribe to store events so that secrets owned by a workspace or
    # user are cleaned up when that workspace or user is deleted.
    self.zen_store.register_event_handler(
        StoreEvent.WORKSPACE_DELETED, self._on_workspace_deleted
    )

    self.zen_store.register_event_handler(
        StoreEvent.USER_DELETED, self._on_user_deleted
    )

    try:
        self._initialize()
    except Exception as e:
        # Wrap any backend-specific initialization error into a uniform
        # RuntimeError, keeping the original exception chained.
        raise RuntimeError(
            f"Error initializing {self.type.value} secrets store: {str(e)}"
        ) from e
create_store(config, **kwargs) staticmethod

Create and initialize a secrets store from a secrets store configuration.

Parameters:

Name Type Description Default
config SecretsStoreConfiguration

The secrets store configuration to use.

required
**kwargs Any

Additional keyword arguments to pass to the store class

{}

Returns:

Type Description
BaseSecretsStore

The initialized secrets store.

Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
@staticmethod
def create_store(
    config: SecretsStoreConfiguration,
    **kwargs: Any,
) -> "BaseSecretsStore":
    """Create and initialize a secrets store from a secrets store configuration.

    Args:
        config: The secrets store configuration to use.
        **kwargs: Additional keyword arguments to pass to the store class

    Returns:
        The initialized secrets store.
    """
    logger.debug(
        f"Creating secrets store with type '{config.type.value}'..."
    )
    # Resolve the concrete implementation class for the configured type
    # and instantiate it directly.
    return BaseSecretsStore.get_store_class(config)(
        config=config,
        **kwargs,
    )
get_store_class(store_config) staticmethod

Returns the class of the given secrets store type.

Parameters:

Name Type Description Default
store_config SecretsStoreConfiguration

The configuration of the secrets store.

required

Returns:

Type Description
Type[BaseSecretsStore]

The secrets store class corresponding to the configured secrets store type.

Exceptions:

Type Description
TypeError

If the secrets store type is unsupported.

Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
@staticmethod
def get_store_class(
    store_config: SecretsStoreConfiguration,
) -> Type["BaseSecretsStore"]:
    """Returns the class of the given secrets store type.

    Args:
        store_config: The configuration of the secrets store.

    Returns:
        The secrets store class matching the configured store type.

    Raises:
        TypeError: If the secrets store type is unsupported.
    """
    store_type = store_config.type

    # Custom stores are resolved from the configured class path.
    if store_type == SecretsStoreType.CUSTOM:
        return BaseSecretsStore._load_custom_store_class(store_config)

    # Built-in store implementations are imported lazily so that optional
    # provider dependencies are only loaded when actually needed.
    if store_type == SecretsStoreType.SQL:
        from zenml.zen_stores.secrets_stores.sql_secrets_store import (
            SqlSecretsStore,
        )

        return SqlSecretsStore

    if store_type == SecretsStoreType.REST:
        from zenml.zen_stores.secrets_stores.rest_secrets_store import (
            RestSecretsStore,
        )

        return RestSecretsStore

    if store_type == SecretsStoreType.AWS:
        from zenml.zen_stores.secrets_stores.aws_secrets_store import (
            AWSSecretsStore,
        )

        return AWSSecretsStore

    if store_type == SecretsStoreType.GCP:
        from zenml.zen_stores.secrets_stores.gcp_secrets_store import (
            GCPSecretsStore,
        )

        return GCPSecretsStore

    if store_type == SecretsStoreType.AZURE:
        from zenml.zen_stores.secrets_stores.azure_secrets_store import (
            AzureSecretsStore,
        )

        return AzureSecretsStore

    if store_type == SecretsStoreType.HASHICORP:
        from zenml.zen_stores.secrets_stores.hashicorp_secrets_store import (
            HashiCorpVaultSecretsStore,
        )

        return HashiCorpVaultSecretsStore

    # Any remaining non-custom type is unknown to this version.
    raise TypeError(
        f"No store implementation found for secrets store type "
        f"`{store_config.type.value}`."
    )
track_event(self, event, metadata=None)

Track an analytics event.

Parameters:

Name Type Description Default
event AnalyticsEvent

The event to track.

required
metadata Optional[Dict[str, Any]]

Additional metadata to track with the event.

None
Source code in zenml/zen_stores/secrets_stores/base_secrets_store.py
def track_event(
    self,
    event: AnalyticsEvent,
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Track an analytics event.

    Args:
        event: The event to track.
        metadata: Additional metadata to track with the event.
    """
    # Respect the store-level analytics opt-out.
    if not self.track_analytics:
        return
    # Server information is always tracked, if available.
    track_event(event, metadata)

gcp_secrets_store

Implementation of the GCP Secrets Store.

GCPSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the GCP Secrets Manager API.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
class GCPSecretsStore(BaseSecretsStore):
    """Secrets store implementation that uses the GCP Secrets Manager API."""

    config: GCPSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.GCP
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = GCPSecretsStoreConfiguration

    # Lazily-initialized GCP Secrets Manager client (see `client` property).
    _client: Optional[SecretManagerServiceClient] = None

    def _initialize(self) -> None:
        """Initialize the GCP secrets store."""
        logger.debug("Initializing GCPSecretsStore")

        # Initialize the GCP client.
        _ = self.client

    @property
    def client(self) -> Any:
        """Initialize and return the GCP Secrets Manager client.

        Returns:
            The GCP Secrets Manager client.
        """
        if self._client is None:
            self._client = SecretManagerServiceClient()
        return self._client

    @property
    def parent_name(self) -> str:
        """Construct the GCP parent path to the secret manager.

        Returns:
            The parent path to the secret manager
        """
        return f"projects/{self.config.project_id}"

    def _get_secret_labels(
        self, secret: Union[SecretRequestModel, SecretResponseModel]
    ) -> List[Tuple[str, str]]:
        """Return a list of Google secret label values for a given secret.

        Args:
            secret: the secret object

        Returns:
            A list of Google secret label values
        """
        metadata = self._get_secret_metadata_for_secret(secret)
        return list(metadata.items())

    def _validate_gcp_secret_name(self, name: str) -> None:
        """Validate a secret name.

        Given that we save secret names as labels, we are also limited by the
        limitation that Google imposes on label values: max 63 characters and
        must only contain lowercase letters, numerals and the hyphen (-) and
        underscore (_) characters.

        Args:
            name: the secret name

        Raises:
            ValueError: if the secret name is invalid
        """
        if not re.fullmatch(r"[a-z0-9_\-]+", name):
            raise ValueError(
                f"Invalid secret name '{name}'. Must contain "
                f"only lowercase alphanumeric characters and the hyphen (-) and "
                f"underscore (_) characters."
            )

        if name and len(name) > 63:
            raise ValueError(
                f"Invalid secret name '{name}'. The length is "
                f"limited to maximum 63 characters."
            )

    def _get_gcp_secret_name(
        self,
        secret_id: UUID,
    ) -> str:
        """Get the GCP secret name for the given secret.

        The convention used for GCP secret names is to use the ZenML
        secret UUID prefixed with the ZenML prefix as the GCP secret name,
        i.e. `<prefix>-<secret_uuid>`.

        Args:
            secret_id: The ZenML secret ID.

        Returns:
            The GCP secret name.
        """
        return f"{GCP_ZENML_SECRET_NAME_PREFIX}-{str(secret_id)}"

    def _convert_gcp_secret(
        self,
        labels: Dict[str, str],
        values: Optional[Dict[str, str]] = None,
    ) -> SecretResponseModel:
        """Create a ZenML secret model from data stored in an GCP secret.

        If the GCP secret cannot be converted, the method acts as if the
        secret does not exist and raises a KeyError.

        Args:
            labels: The GCP secret labels.
            values: The GCP secret values.

        Returns:
            The ZenML secret model.

        Raises:
            KeyError: if the GCP secret cannot be converted.
        """
        # Recover the ZenML secret metadata from the GCP secret labels.

        # The GCP secret labels do not really behave like a dictionary: when
        # a key is not found, it does not raise a KeyError, but instead
        # returns an empty string. That's why we make this conversion.
        label_dict = dict(labels)

        try:
            created = datetime.strptime(
                label_dict[ZENML_GCP_SECRET_CREATED_KEY],
                ZENML_GCP_DATE_FORMAT_STRING,
            )
            updated = datetime.strptime(
                label_dict[ZENML_GCP_SECRET_UPDATED_KEY],
                ZENML_GCP_DATE_FORMAT_STRING,
            )
        except KeyError as e:
            raise KeyError(
                f"Invalid GCP secret: missing required tag '{e}'"
            ) from e

        return self._create_secret_from_metadata(
            metadata=label_dict,
            created=created,
            updated=updated,
            values=values,
        )

    def _get_gcp_filter_string(
        self, secret_filter_model: SecretFilterModel
    ) -> str:
        """Convert a SecretFilterModel to a GCP filter string.

        Args:
            secret_filter_model: The secret filter model.

        Returns:
            The GCP filter string.
        """
        operator_map = {
            "equals": ":",
        }
        filter_terms = []
        # NOTE: renamed loop variable to avoid shadowing the `filter` builtin.
        for column_filter in secret_filter_model.list_of_filters:
            filter_terms.append(
                f"{column_filter.column}{operator_map[column_filter.operation.value]}{column_filter.value}"
            )

        return f" {secret_filter_model.logical_operator.name} ".join(
            filter_terms
        )

    @track(AnalyticsEvent.CREATED_SECRET, v2=True)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Create a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The created secret.

        Raises:
            RuntimeError: if the secret was unable to be created.
            EntityExistsError: If a secret with the same name already exists
                in the same scope.
        """
        self._validate_gcp_secret_name(secret.name)

        user, workspace = self._validate_user_and_workspace(
            secret.user, secret.workspace
        )

        # Check if a secret with the same name already exists in the same
        # scope.
        secret_exists, msg = self._check_secret_scope(
            secret_name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
        )
        if secret_exists:
            raise EntityExistsError(msg)

        secret_id = uuid.uuid4()
        secret_value = json.dumps(secret.secret_values)

        created = datetime.utcnow().replace(tzinfo=None, microsecond=0)
        labels = self._get_secret_metadata_for_secret(
            secret=secret, secret_id=secret_id
        )
        labels[ZENML_GCP_SECRET_CREATED_KEY] = created.strftime(
            ZENML_GCP_DATE_FORMAT_STRING
        )
        labels[ZENML_GCP_SECRET_UPDATED_KEY] = created.strftime(
            ZENML_GCP_DATE_FORMAT_STRING
        )

        try:
            # Create the parent secret first, then attach the values as a
            # secret version. The ZenML metadata travels in the labels.
            gcp_secret = self.client.create_secret(
                request={
                    "parent": self.parent_name,
                    "secret_id": self._get_gcp_secret_name(secret_id),
                    "secret": {
                        "replication": {"automatic": {}},
                        "labels": labels,
                    },
                }
            )

            logger.debug("Created empty parent secret: %s", gcp_secret.name)

            self.client.add_secret_version(
                request={
                    "parent": gcp_secret.name,
                    "payload": {"data": secret_value.encode()},
                }
            )
        except Exception as e:
            raise RuntimeError(f"Failed to create secret: {str(e)}") from e

        logger.debug("Added value to secret.")

        return SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=workspace,
            user=user,
            values=secret.secret_values,
            created=created,
            updated=created,
        )

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the GCP Secrets Manager API returns an unexpected
                error.
        """
        gcp_secret_name = self.client.secret_path(
            self.config.project_id,
            self._get_gcp_secret_name(secret_id=secret_id),
        )

        try:
            secret = self.client.get_secret(name=gcp_secret_name)
            secret_version_values = self.client.access_secret_version(
                name=f"{gcp_secret_name}/versions/latest"
            )
        except google_exceptions.NotFound as e:
            raise KeyError(
                f"Can't find the specified secret for secret_id '{secret_id}': {str(e)}"
            ) from e
        except Exception as e:
            # Chain the original exception for easier debugging.
            raise RuntimeError(
                f"Error fetching secret with ID {secret_id} {e}"
            ) from e

        secret_values = json.loads(
            secret_version_values.payload.data.decode("UTF-8")
        )

        return self._convert_gcp_secret(
            labels=secret.labels,
            values=secret_values,
        )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: The filter criteria.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.

        Raises:
            ValueError: If the filter contains an out-of-bounds page number.
            RuntimeError: If the GCP Secrets Manager API returns an unexpected
                error.
        """
        # TODO: implement filter method for server-side filtering
        # convert the secret_filter_model to a GCP filter string
        gcp_filters = ""
        # gcp_filters = self._get_gcp_filter_string(
        #     secret_filter_model=secret_filter_model
        # )

        try:
            # get all the secrets and their labels (for their names) from GCP
            # (use the filter string to limit what doesn't match the filter)
            secrets = []
            for secret in self.client.list_secrets(
                request={
                    "parent": self.parent_name,
                    "filter": gcp_filters,
                }
            ):
                try:
                    secrets.append(self._convert_gcp_secret(secret.labels))
                except KeyError:
                    # keep going / ignore if this secret version doesn't exist
                    # or isn't a ZenML secret
                    continue
        except Exception as e:
            raise RuntimeError(f"Error listing GCP secrets: {e}") from e

        # do client filtering for anything not covered by the filter string
        filtered_secrets = [
            secret
            for secret in secrets
            if secret_filter_model.secret_matches(secret)
        ]

        # sort the results
        sorted_results = secret_filter_model.sort_secrets(filtered_secrets)

        # paginate the results
        secret_count = len(sorted_results)
        if secret_count == 0:
            total_pages = 1
        else:
            total_pages = math.ceil(secret_count / secret_filter_model.size)
        if secret_filter_model.page > total_pages:
            raise ValueError(
                f"Invalid page {secret_filter_model.page}. The requested page "
                f"size is {secret_filter_model.size} and there are a total of "
                f"{secret_count} items for this query. The maximum page value "
                f"therefore is {total_pages}."
            )
        return Page(
            total=secret_count,
            total_pages=total_pages,
            items=sorted_results[
                (secret_filter_model.page - 1)
                * secret_filter_model.size : secret_filter_model.page
                * secret_filter_model.size
            ],
            index=secret_filter_model.page,
            max_size=secret_filter_model.size,
        )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Update a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to update.
            secret_update: The update to apply to the secret.

        Returns:
            The updated secret.

        Raises:
            RuntimeError: If the secret update is invalid.
            EntityExistsError: If the update includes a change of name or
                scope and a secret with the same name already exists in the
                same scope.
        """
        secret = self.get_secret(secret_id=secret_id)
        gcp_secret_name = self.client.secret_path(
            self.config.project_id,
            self._get_gcp_secret_name(secret_id=secret_id),
        )

        assert secret.user is not None
        self._validate_user_and_workspace_update(
            secret_update=secret_update,
            current_user=secret.user.id,
            current_workspace=secret.workspace.id,
        )

        if secret_update.name is not None:
            self._validate_gcp_secret_name(secret_update.name)
            secret.name = secret_update.name
        if secret_update.scope is not None:
            secret.scope = secret_update.scope
        if secret_update.values is not None:
            # Merge the existing values with the update values.
            # The values that are set to `None` in the update are removed from
            # the existing secret when we call `.secret_values` later.
            secret.values.update(secret_update.values)

        if secret_update.name is not None or secret_update.scope is not None:
            # Check if a secret with the same name already exists in the same
            # scope.
            assert secret.user is not None
            secret_exists, msg = self._check_secret_scope(
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace.id,
                user=secret.user.id,
                exclude_secret_id=secret.id,
            )
            if secret_exists:
                raise EntityExistsError(msg)

        # Convert the ZenML secret metadata to GCP labels
        updated = datetime.utcnow().replace(tzinfo=None, microsecond=0)
        metadata = self._get_secret_metadata_for_secret(secret)
        metadata[ZENML_GCP_SECRET_UPDATED_KEY] = updated.strftime(
            ZENML_GCP_DATE_FORMAT_STRING
        )
        metadata[ZENML_GCP_SECRET_CREATED_KEY] = secret.created.strftime(
            ZENML_GCP_DATE_FORMAT_STRING
        )

        try:
            # UPDATE THE SECRET METADATA
            update_secret = {
                "name": gcp_secret_name,
                "labels": metadata,
            }
            update_mask = {"paths": ["labels"]}
            gcp_updated_secret = self.client.update_secret(
                request={
                    "secret": update_secret,
                    "update_mask": update_mask,
                }
            )
            # ADD A NEW SECRET VERSION
            secret_value = json.dumps(secret.secret_values)
            self.client.add_secret_version(
                request={
                    "parent": gcp_updated_secret.name,
                    "payload": {"data": secret_value.encode()},
                }
            )
        except Exception as e:
            raise RuntimeError(f"Error updating secret: {e}") from e

        logger.debug("Updated GCP secret: %s", gcp_secret_name)

        return SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
            values=secret.secret_values,
            created=secret.created,
            updated=updated,
        )

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The ID of the secret to delete.

        Raises:
            KeyError: If the secret could not be found.
            RuntimeError: If the secret could not be deleted.
        """
        gcp_secret_name = self.client.secret_path(
            self.config.project_id,
            self._get_gcp_secret_name(secret_id=secret_id),
        )

        try:
            self.client.delete_secret(request={"name": gcp_secret_name})
        except google_exceptions.NotFound as e:
            # Chain the original exception for easier debugging.
            raise KeyError(f"Secret with ID {secret_id} not found") from e
        except Exception as e:
            raise RuntimeError(f"Failed to delete secret: {str(e)}") from e
client: Any property readonly

Initialize and return the GCP Secrets Manager client.

Returns:

Type Description
Any

The GCP Secrets Manager client.

parent_name: str property readonly

Construct the GCP parent path to the secret manager.

Returns:

Type Description
str

The parent path to the secret manager

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

GCP secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

project_id str

The GCP project ID where the secrets are stored.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
class GCPSecretsStoreConfiguration(SecretsStoreConfiguration):
    """Configuration for the GCP secrets store.

    Attributes:
        type: The type of the store.
        project_id: The GCP project ID where the secrets are stored.
    """

    type: SecretsStoreType = SecretsStoreType.GCP
    project_id: str

    class Config:
        """Pydantic configuration class."""

        # Reject any attributes not declared on the model.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Reject any attributes not declared on the model.
    extra = "forbid"
create_secret(*args, **kwargs)

Create a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The created secret.

Exceptions:

Type Description
RuntimeError

if the secret was unable to be created.

EntityExistsError

If a secret with the same name already exists in the same scope.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # The first positional argument (typically `self`) becomes the
            # analytics tracker when it supports tracking.
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Harvest analytics metadata from the first tracked model found
            # among the call arguments, if any.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence when available.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to delete.

required

Exceptions:

Type Description
KeyError

If the secret could not be found.

RuntimeError

If the secret could not be deleted.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # The first positional argument (typically `self`) becomes the
            # analytics tracker when it supports tracking.
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Harvest analytics metadata from the first tracked model found
            # among the call arguments, if any.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence when available.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the GCP Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.

    Raises:
        KeyError: If the secret does not exist.
        RuntimeError: If the GCP Secrets Manager API returns an unexpected
            error.
    """
    gcp_secret_name = self.client.secret_path(
        self.config.project_id,
        self._get_gcp_secret_name(secret_id=secret_id),
    )

    try:
        secret = self.client.get_secret(name=gcp_secret_name)
        secret_version_values = self.client.access_secret_version(
            name=f"{gcp_secret_name}/versions/latest"
        )
    except google_exceptions.NotFound as e:
        raise KeyError(
            f"Can't find the specified secret for secret_id '{secret_id}': {str(e)}"
        ) from e
    except Exception as e:
        # Chain the original exception for easier debugging.
        raise RuntimeError(
            f"Error fetching secret with ID {secret_id} {e}"
        ) from e

    secret_values = json.loads(
        secret_version_values.payload.data.decode("UTF-8")
    )

    return self._convert_gcp_secret(
        labels=secret.labels,
        values=secret_values,
    )
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

The filter criteria.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Exceptions:

Type Description
ValueError

If the filter contains an out-of-bounds page number.

RuntimeError

If the GCP Secrets Manager API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Note that returned secrets do not include any secret values. To fetch
    the secret values, use `get_secret`.

    Args:
        secret_filter_model: The filter criteria.

    Returns:
        A list of all secrets matching the filter criteria, with pagination
        information and sorted according to the filter criteria. The
        returned secrets do not include any secret values, only metadata. To
        fetch the secret values, use `get_secret` individually with each
        secret.

    Raises:
        ValueError: If the filter contains an out-of-bounds page number.
        RuntimeError: If the GCP Secrets Manager API returns an unexpected
            error.
    """
    # TODO: implement filter method for server-side filtering
    # convert the secret_filter_model to a GCP filter string
    gcp_filters = ""
    # gcp_filters = self._get_gcp_filter_string(
    #     secret_filter_model=secret_filter_model
    # )

    try:
        # get all the secrets and their labels (for their names) from GCP
        # (use the filter string to limit what doesn't match the filter)
        secrets = []
        for secret in self.client.list_secrets(
            request={
                "parent": self.parent_name,
                "filter": gcp_filters,
            }
        ):
            try:
                secrets.append(self._convert_gcp_secret(secret.labels))
            except KeyError:
                # keep going / ignore if this secret version doesn't exist
                # or isn't a ZenML secret
                continue
    except Exception as e:
        raise RuntimeError(f"Error listing GCP secrets: {e}") from e

    # do client filtering for anything not covered by the filter string
    filtered_secrets = [
        secret
        for secret in secrets
        if secret_filter_model.secret_matches(secret)
    ]

    # sort the results
    sorted_results = secret_filter_model.sort_secrets(filtered_secrets)

    # paginate the results
    secret_count = len(sorted_results)
    if secret_count == 0:
        total_pages = 1
    else:
        total_pages = math.ceil(secret_count / secret_filter_model.size)
    if secret_filter_model.page > total_pages:
        raise ValueError(
            f"Invalid page {secret_filter_model.page}. The requested page "
            f"size is {secret_filter_model.size} and there are a total of "
            f"{secret_count} items for this query. The maximum page value "
            f"therefore is {total_pages}."
        )
    return Page(
        total=secret_count,
        total_pages=total_pages,
        items=sorted_results[
            (secret_filter_model.page - 1)
            * secret_filter_model.size : secret_filter_model.page
            * secret_filter_model.size
        ],
        index=secret_filter_model.page,
        max_size=secret_filter_model.size,
    )
update_secret(*args, **kwargs)

Update a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to update.

required
secret_update

The update to apply to the secret.

required

Returns:

Type Description
Any

The updated secret.

Exceptions:

Type Description
RuntimeError

If the secret update is invalid.

EntityExistsError

If the update includes a change of name or scope and a secret with the same name already exists in the same scope.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # The first positional argument (typically `self`) becomes the
            # analytics tracker when it supports tracking.
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Harvest analytics metadata from the first tracked model found
            # among the call arguments, if any.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        result = func(*args, **kwargs)

        try:
            # Metadata from the result takes precedence when available.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as exc:
            logger.debug(f"Analytics tracking failure for {func}: {exc}")

        return result
GCPSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

GCP secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

project_id str

The GCP project ID where the secrets are stored.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
class GCPSecretsStoreConfiguration(SecretsStoreConfiguration):
    """Configuration for the GCP secrets store.

    Attributes:
        type: The type of the store.
        project_id: The GCP project ID where the secrets are stored.
    """

    type: SecretsStoreType = SecretsStoreType.GCP
    project_id: str

    class Config:
        """Pydantic configuration class."""

        # Reject any attributes not declared on the model.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/gcp_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Forbid extra attributes set in the class: unknown fields passed to
    # the model raise a validation error instead of being ignored.
    extra = "forbid"

hashicorp_secrets_store

HashiCorp Vault Secrets Store implementation.

HashiCorpVaultSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the HashiCorp Vault API.

This secrets store implementation uses the HashiCorp Vault API to store secrets. It allows a single HashiCorp Vault server to be shared with other ZenML deployments as well as other third party users and applications.

Here are some implementation highlights:

  • the name/ID of a HashiCorp Vault secret is derived from the ZenML secret UUID and a zenml prefix in the form zenml/{zenml_secret_uuid}. This clearly identifies a secret as being managed by ZenML in the HashiCorp Vault server. This also allows us to reduce the scope of list_secrets to cover only secrets managed by ZenML by using zenml/ as the path prefix.

  • given that HashiCorp Vault secrets do not support attaching arbitrary metadata in the form of label or tags, we store the entire ZenML secret metadata (e.g. name, scope, etc.) alongside the secret values in the HashiCorp Vault secret value.

  • when a user or workspace is deleted, the secrets associated with it are deleted automatically via registered event handlers.

Known challenges and limitations:

  • HashiCorp Vault secrets do not support filtering secrets by metadata attached to secrets in the form of label or tags. This means that we cannot filter secrets server-side based on their metadata (e.g. name, scope, etc.). Instead, we have to retrieve all ZenML managed secrets and filter them client-side.

  • HashiCorp Vault secrets are versioned. This means that when a secret is updated, a new version is created which has its own creation timestamp. Furthermore, older secret versions are deleted automatically after a certain configurable number of versions is reached. To work around this, we also manage created and updated timestamps here and store them in the secret value itself.

Attributes:

Name Type Description
config

The configuration of the HashiCorp Vault secrets store.

TYPE

The type of the store.

CONFIG_TYPE

The type of the store configuration.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
class HashiCorpVaultSecretsStore(BaseSecretsStore):
    """Secrets store implementation that uses the HashiCorp Vault API.

    This secrets store implementation uses the HashiCorp Vault API to
    store secrets. It allows a single HashiCorp Vault server to be shared with
    other ZenML deployments as well as other third party users and applications.

    Here are some implementation highlights:

    * the name/ID of a HashiCorp Vault secret is derived from the ZenML secret
    UUID and a `zenml` prefix in the form `zenml/{zenml_secret_uuid}`. This
    clearly identifies a secret as being managed by ZenML in the HashiCorp Vault
    server. This also allows us to reduce the scope of `list_secrets` to cover
    only secrets managed by ZenML by using `zenml/` as the path prefix.

    * given that HashiCorp Vault secrets do not support attaching arbitrary
    metadata in the form of label or tags, we store the entire ZenML secret
    metadata (e.g. name, scope, etc.) alongside the secret values in the
    HashiCorp Vault secret value.

    * when a user or workspace is deleted, the secrets associated with it are
    deleted automatically via registered event handlers.

    Known challenges and limitations:

    * HashiCorp Vault secrets do not support filtering secrets by metadata
    attached to secrets in the form of label or tags. This means that we cannot
    filter secrets server-side based on their metadata (e.g. name, scope, etc.).
    Instead, we have to retrieve all ZenML managed secrets and filter them
    client-side.

    * HashiCorp Vault secrets are versioned. This means that when a secret is
    updated, a new version is created which has its own creation timestamp.
    Furthermore, older secret versions are deleted automatically after a certain
    configurable number of versions is reached. To work around this, we also
    manage `created` and `updated` timestamps here and store them in the secret
    value itself.


    Attributes:
        config: The configuration of the HashiCorp Vault secrets store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
    """

    config: HashiCorpVaultSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.HASHICORP
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = HashiCorpVaultSecretsStoreConfiguration

    # Lazily-initialized hvac client; see the `client` property.
    _client: Optional[hvac.Client] = None

    @property
    def client(self) -> hvac.Client:
        """Initialize and return the HashiCorp Vault client.

        Returns:
            The HashiCorp Vault client.
        """
        if self._client is None:
            # Initialize the HashiCorp Vault client with the
            # credentials from the configuration.
            self._client = hvac.Client(
                url=self.config.vault_addr,
                token=self.config.vault_token.get_secret_value()
                if self.config.vault_token
                else None,
                namespace=self.config.vault_namespace,
            )
            self._client.secrets.kv.v2.configure(
                max_versions=self.config.max_versions,
            )
            if self.config.mount_point:
                self._client.secrets.kv.v2.configure(
                    mount_point=self.config.mount_point,
                )
        return self._client

    # ====================================
    # Secrets Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the HashiCorp Vault secrets store."""
        logger.debug("Initializing HashiCorpVaultSecretsStore")

        # Initialize the HashiCorp Vault client early, just to catch any
        # configuration or authentication errors early, before the Secrets
        # Store is used.
        _ = self.client

    # ------
    # Secrets
    # ------

    @staticmethod
    def _validate_vault_secret_name(name: str) -> None:
        """Validate a secret name.

        HashiCorp Vault secret names must contain only alphanumeric characters
        and the characters _+=.@-/.

        Args:
            name: the secret name

        Raises:
            ValueError: if the secret name is invalid
        """
        if not re.fullmatch(r"[a-zA-Z0-9_+=\.@\-/]*", name):
            raise ValueError(
                f"Invalid secret name or namespace '{name}'. Must contain "
                f"only alphanumeric characters and the characters _+=.@-/."
            )

    @staticmethod
    def _get_vault_secret_id(
        secret_id: UUID,
    ) -> str:
        """Get the HashiCorp Vault secret ID corresponding to a ZenML secret ID.

        The convention used for HashiCorp Vault secret names is to use the ZenML
        secret UUID prefixed with `zenml` as the HashiCorp Vault secret name,
        i.e. `zenml/<secret_uuid>`.

        Args:
            secret_id: The ZenML secret ID.

        Returns:
            The HashiCorp Vault secret name.
        """
        return f"{HVAC_ZENML_SECRET_NAME_PREFIX}/{str(secret_id)}"

    def _convert_vault_secret(
        self,
        vault_secret: Dict[str, Any],
    ) -> SecretResponseModel:
        """Create a ZenML secret model from data stored in a HashiCorp Vault secret.

        If the HashiCorp Vault secret cannot be converted, the method acts as if
        the secret does not exist and raises a KeyError.

        Args:
            vault_secret: The HashiCorp Vault secret in JSON form.

        Returns:
            The ZenML secret.

        Raises:
            KeyError: if the HashiCorp Vault secret cannot be converted.
        """
        try:
            metadata = vault_secret[ZENML_VAULT_SECRET_METADATA_KEY]
            values = vault_secret[ZENML_VAULT_SECRET_VALUES_KEY]
            created = datetime.fromisoformat(
                vault_secret[ZENML_VAULT_SECRET_CREATED_KEY],
            )
            updated = datetime.fromisoformat(
                vault_secret[ZENML_VAULT_SECRET_UPDATED_KEY],
            )
        except (KeyError, ValueError) as e:
            raise KeyError(
                f"Secret could not be retrieved: missing required metadata: {e}"
            )

        return self._create_secret_from_metadata(
            metadata=metadata,
            created=created,
            updated=updated,
            values=values,
        )

    @track(AnalyticsEvent.CREATED_SECRET, v2=True)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.

        Raises:
            EntityExistsError: If a secret with the same name already exists
                in the same scope.
            RuntimeError: If the HashiCorp Vault API returns an unexpected
                error.
        """
        self._validate_vault_secret_name(secret.name)
        user, workspace = self._validate_user_and_workspace(
            secret.user, secret.workspace
        )

        # Check if a secret with the same name already exists in the same
        # scope.
        secret_exists, msg = self._check_secret_scope(
            secret_name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
        )
        if secret_exists:
            raise EntityExistsError(msg)

        # Generate a new UUID for the secret
        secret_id = uuid.uuid4()
        vault_secret_id = self._get_vault_secret_id(secret_id)

        metadata = self._get_secret_metadata_for_secret(
            secret, secret_id=secret_id
        )

        created = datetime.utcnow()
        try:
            self.client.secrets.kv.v2.create_or_update_secret(
                path=vault_secret_id,
                # Store the ZenML secret metadata alongside the secret values
                secret={
                    ZENML_VAULT_SECRET_VALUES_KEY: secret.secret_values,
                    ZENML_VAULT_SECRET_METADATA_KEY: metadata,
                    ZENML_VAULT_SECRET_CREATED_KEY: created.isoformat(),
                    ZENML_VAULT_SECRET_UPDATED_KEY: created.isoformat(),
                },
                # Do not allow overwriting an existing secret
                cas=0,
            )
        except VaultError as e:
            raise RuntimeError(f"Error creating secret: {e}")

        logger.debug("Created HashiCorp Vault secret: %s", vault_secret_id)

        secret_model = SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=workspace,
            user=user,
            values=secret.secret_values,
            created=created,
            updated=created,
        )

        return secret_model

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the HashiCorp Vault API returns an unexpected
                error.
        """
        vault_secret_id = self._get_vault_secret_id(secret_id)

        try:
            vault_secret = (
                self.client.secrets.kv.v2.read_secret(
                    path=vault_secret_id,
                )
                .get("data", {})
                .get("data", {})
            )
        except InvalidPath:
            raise KeyError(f"Secret with ID {secret_id} not found")
        except VaultError as e:
            raise RuntimeError(
                f"Error fetching secret with ID {secret_id}: {e}"
            )

        # The _convert_vault_secret method raises a KeyError if the
        # secret is tied to a workspace or user that no longer exists. Here we
        # simply pass the exception up the stack, as if the secret was not found
        # in the first place, knowing that it will be cascade-deleted soon.
        return self._convert_vault_secret(
            vault_secret,
        )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.

        Raises:
            ValueError: If the filter contains an out-of-bounds page number.
            RuntimeError: If the HashiCorp Vault API returns an unexpected
                error.
        """
        # The HashiCorp Vault API does not natively support any of the
        # filtering, sorting or pagination options that ZenML supports. The
        # implementation of this method therefore has to fetch all secrets from
        # the Key Vault, then apply the filtering, sorting and pagination on
        # the client side.

        results: List[SecretResponseModel] = []

        try:
            # List all ZenML secrets in the Vault
            all_secrets = (
                self.client.secrets.kv.v2.list_secrets(
                    path=HVAC_ZENML_SECRET_NAME_PREFIX
                )
                .get("data", {})
                .get("keys", [])
            )
        except InvalidPath:
            # no secrets created yet
            pass
        except VaultError as e:
            raise RuntimeError(f"Error listing HashiCorp Vault secrets: {e}")
        else:
            # Convert the Vault secrets to ZenML secrets
            for secret_uuid in all_secrets:
                vault_secret_id = (
                    f"{HVAC_ZENML_SECRET_NAME_PREFIX}/{secret_uuid}"
                )
                try:
                    vault_secret = (
                        self.client.secrets.kv.v2.read_secret(
                            path=vault_secret_id
                        )
                        .get("data", {})
                        .get("data", {})
                    )
                except (InvalidPath, VaultError) as e:
                    # Use the module logger (not the root logger) so these
                    # warnings follow the ZenML logging configuration.
                    logger.warning(
                        f"Error fetching secret with ID {vault_secret_id}: {e}",
                    )
                    continue

                try:
                    secret_model = self._convert_vault_secret(
                        vault_secret,
                    )
                except KeyError as e:
                    # The _convert_vault_secret method raises a KeyError
                    # if the secret is tied to a workspace or user that no
                    # longer exists or if it is otherwise not valid. Here we
                    # pretend that the secret does not exist.
                    logger.warning(
                        f"Error fetching secret with ID {vault_secret_id}: {e}",
                    )
                    continue

                # Filter the secret on the client side.
                if not secret_filter_model.secret_matches(secret_model):
                    continue

                # Remove the secret values from the response
                secret_model.values = {}
                results.append(secret_model)

        # Sort the results
        sorted_results = secret_filter_model.sort_secrets(results)

        # Paginate the results
        total = len(sorted_results)
        if total == 0:
            total_pages = 1
        else:
            total_pages = math.ceil(total / secret_filter_model.size)

        if secret_filter_model.page > total_pages:
            raise ValueError(
                f"Invalid page {secret_filter_model.page}. The requested page "
                f"size is {secret_filter_model.size} and there are a total of "
                f"{total} items for this query. The maximum page value "
                f"therefore is {total_pages}."
            )

        return Page(
            total=total,
            total_pages=total_pages,
            items=sorted_results[
                (secret_filter_model.page - 1)
                * secret_filter_model.size : secret_filter_model.page
                * secret_filter_model.size
            ],
            index=secret_filter_model.page,
            max_size=secret_filter_model.size,
        )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Updates a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.

        Raises:
            KeyError: If the secret does not exist.
            EntityExistsError: If the update includes a change of name or
                scope and a secret with the same name already exists in the
                same scope.
            RuntimeError: If the HashiCorp Vault API returns an unexpected
                error.
        """
        secret = self.get_secret(secret_id)

        # Prevent changes to the secret's user or workspace
        assert secret.user is not None
        self._validate_user_and_workspace_update(
            secret_update=secret_update,
            current_user=secret.user.id,
            current_workspace=secret.workspace.id,
        )

        if secret_update.name is not None:
            self._validate_vault_secret_name(secret_update.name)
            secret.name = secret_update.name
        if secret_update.scope is not None:
            secret.scope = secret_update.scope
        if secret_update.values is not None:
            # Merge the existing values with the update values.
            # The values that are set to `None` in the update are removed from
            # the existing secret when we call `.secret_values` later.
            secret.values.update(secret_update.values)

        if secret_update.name is not None or secret_update.scope is not None:
            # Check if a secret with the same name already exists in the same
            # scope.
            assert secret.user is not None
            secret_exists, msg = self._check_secret_scope(
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace.id,
                user=secret.user.id,
                exclude_secret_id=secret.id,
            )
            if secret_exists:
                raise EntityExistsError(msg)

        vault_secret_id = self._get_vault_secret_id(secret_id)

        # Convert the ZenML secret metadata to HashiCorp Vault tags
        metadata = self._get_secret_metadata_for_secret(secret)

        updated = datetime.utcnow()
        try:
            self.client.secrets.kv.v2.create_or_update_secret(
                path=vault_secret_id,
                # Store the ZenML secret metadata alongside the secret values
                secret={
                    ZENML_VAULT_SECRET_VALUES_KEY: secret.secret_values,
                    ZENML_VAULT_SECRET_METADATA_KEY: metadata,
                    ZENML_VAULT_SECRET_CREATED_KEY: secret.created.isoformat(),
                    ZENML_VAULT_SECRET_UPDATED_KEY: updated.isoformat(),
                },
            )
        except InvalidPath:
            raise KeyError(f"Secret with ID {secret_id} does not exist.")
        except VaultError as e:
            raise RuntimeError(f"Error updating secret {secret_id}: {e}")

        logger.debug("Updated HashiCorp Vault secret: %s", vault_secret_id)

        secret_model = SecretResponseModel(
            id=secret_id,
            name=secret.name,
            scope=secret.scope,
            workspace=secret.workspace,
            user=secret.user,
            values=secret.secret_values,
            created=secret.created,
            updated=updated,
        )

        return secret_model

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The id of the secret to delete.

        Raises:
            KeyError: If the secret does not exist.
            RuntimeError: If the HashiCorp Vault API returns an unexpected
                error.
        """
        try:
            self.client.secrets.kv.v2.delete_metadata_and_all_versions(
                path=self._get_vault_secret_id(secret_id),
            )
        except InvalidPath:
            raise KeyError(f"Secret with ID {secret_id} does not exist.")
        except VaultError as e:
            raise RuntimeError(
                f"Error deleting secret with ID {secret_id}: {e}"
            )
client: Client property readonly

Initialize and return the HashiCorp Vault client.

Returns:

Type Description
Client

The HashiCorp Vault client.

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

HashiCorp Vault secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

vault_addr str

The url of the Vault server. If not set, the value will be loaded from the VAULT_ADDR environment variable, if configured.

vault_token Optional[pydantic.types.SecretStr]

The token used to authenticate with the Vault server. If not set, the token will be loaded from the VAULT_TOKEN environment variable or from the ~/.vault-token file, if configured.

vault_namespace Optional[str]

The Vault Enterprise namespace.

mount_point Optional[str]

The mount point to use for all secrets.

max_versions int

The maximum number of secret versions to keep.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
class HashiCorpVaultSecretsStoreConfiguration(SecretsStoreConfiguration):
    """HashiCorp Vault secrets store configuration.

    Attributes:
        type: The type of the store.
        vault_addr: The url of the Vault server. If not set, the value will be
            loaded from the VAULT_ADDR environment variable, if configured.
        vault_token: The token used to authenticate with the Vault server. If
            not set, the token will be loaded from the VAULT_TOKEN environment
            variable or from the ~/.vault-token file, if configured.
        vault_namespace: The Vault Enterprise namespace.
        mount_point: The mount point to use for all secrets.
        max_versions: The maximum number of secret versions to keep.
    """

    # Fixed discriminator identifying this configuration as the Vault store.
    type: SecretsStoreType = SecretsStoreType.HASHICORP

    # Required Vault server URL.
    vault_addr: str
    # SecretStr keeps the token out of reprs and logs.
    vault_token: Optional[SecretStr] = None
    vault_namespace: Optional[str] = None
    mount_point: Optional[str] = None
    # Keep a single version per secret by default; ZenML manages its own
    # created/updated timestamps instead of relying on Vault versioning.
    max_versions: int = 1

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Forbid extra attributes set in the class: unknown fields passed to
    # the model raise a validation error instead of being ignored.
    extra = "forbid"
create_secret(*args, **kwargs)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The newly created secret.

Exceptions:

Type Description
EntityExistsError

If a secret with the same name already exists in the same scope.

RuntimeError

If the HashiCorp Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapped call that records analytics around the tracked function.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is best-effort: failures here must never
        # break the wrapped call.
        try:
            # A method on an analytics-aware object supplies the tracker.
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # The first trackable argument (if any) supplies the metadata.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Metadata derived from the result overwrites argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The id of the secret to delete.

required

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the HashiCorp Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapped call that records analytics around the tracked function.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is best-effort: failures here must never
        # break the wrapped call.
        try:
            # A method on an analytics-aware object supplies the tracker.
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # The first trackable argument (if any) supplies the metadata.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Metadata derived from the result overwrites argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

If the secret does not exist.

RuntimeError

If the HashiCorp Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.

    Raises:
        KeyError: If the secret does not exist.
        RuntimeError: If the HashiCorp Vault API returns an unexpected
            error.
    """
    path = self._get_vault_secret_id(secret_id)

    try:
        response = self.client.secrets.kv.v2.read_secret(
            path=path,
        )
    except InvalidPath:
        raise KeyError(f"Secret with ID {secret_id} not found")
    except VaultError as e:
        raise RuntimeError(
            f"Error fetching secret with ID {secret_id} {e}"
        )

    # The secret payload lives under data.data in the kv v2 response.
    vault_secret = response.get("data", {}).get("data", {})

    # _convert_vault_secret raises a KeyError when the secret is tied to a
    # workspace or user that no longer exists; that exception is allowed to
    # propagate, as if the secret was never found, since it will be
    # cascade-deleted soon anyway.
    return self._convert_vault_secret(vault_secret)
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Exceptions:

Type Description
ValueError

If the filter contains an out-of-bounds page number.

RuntimeError

If the HashiCorp Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    The returned secrets contain only metadata and no values; call
    `get_secret` on each individual secret to retrieve its values.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A sorted, paginated list of all secrets matching the filter
        criteria. The returned secrets do not include any secret values,
        only metadata. To fetch the secret values, use `get_secret`
        individually with each secret.

    Raises:
        ValueError: If the filter contains an out-of-bounds page number.
        RuntimeError: If the HashiCorp Vault API returns an unexpected
            error.
    """
    # Vault has no server-side support for ZenML's filtering, sorting or
    # pagination options, so every secret is fetched and the
    # filter/sort/paginate steps all happen on the client side.
    try:
        all_secrets = (
            self.client.secrets.kv.v2.list_secrets(
                path=HVAC_ZENML_SECRET_NAME_PREFIX
            )
            .get("data", {})
            .get("keys", [])
        )
    except InvalidPath:
        # The prefix path does not exist yet, i.e. no secrets have been
        # created so far.
        all_secrets = []
    except VaultError as e:
        raise RuntimeError(f"Error listing HashiCorp Vault secrets: {e}")

    results: List[SecretResponseModel] = []
    for secret_uuid in all_secrets:
        vault_secret_id = f"{HVAC_ZENML_SECRET_NAME_PREFIX}/{secret_uuid}"
        try:
            vault_secret = (
                self.client.secrets.kv.v2.read_secret(
                    path=vault_secret_id
                )
                .get("data", {})
                .get("data", {})
            )
        except (InvalidPath, VaultError) as e:
            logging.warning(
                f"Error fetching secret with ID {vault_secret_id}: {e}",
            )
            continue

        try:
            secret_model = self._convert_vault_secret(
                vault_secret,
            )
        except KeyError as e:
            # _convert_vault_secret raises a KeyError for secrets tied to
            # a workspace or user that no longer exists, or secrets that
            # are otherwise invalid. Such secrets are treated here as if
            # they did not exist.
            logging.warning(
                f"Error fetching secret with ID {vault_secret_id}: {e}",
            )
            continue

        # Apply the filter criteria on the client side.
        if not secret_filter_model.secret_matches(secret_model):
            continue

        # Strip the secret values before returning the metadata.
        secret_model.values = {}
        results.append(secret_model)

    # Sort the matching secrets according to the filter criteria.
    sorted_results = secret_filter_model.sort_secrets(results)

    # Paginate the sorted results. An empty result set still counts as a
    # single (empty) page.
    total = len(sorted_results)
    total_pages = max(1, math.ceil(total / secret_filter_model.size))

    if secret_filter_model.page > total_pages:
        raise ValueError(
            f"Invalid page {secret_filter_model.page}. The requested page "
            f"size is {secret_filter_model.size} and there are a total of "
            f"{total} items for this query. The maximum page value "
            f"therefore is {total_pages}."
        )

    start = (secret_filter_model.page - 1) * secret_filter_model.size
    return Page(
        total=total,
        total_pages=total_pages,
        items=sorted_results[start : start + secret_filter_model.size],
        index=secret_filter_model.page,
        max_size=secret_filter_model.size,
    )
update_secret(*args, **kwargs)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to be updated.

required
secret_update

The update to be applied.

required

Returns:

Type Description
Any

The updated secret.

Exceptions:

Type Description
KeyError

If the secret does not exist.

EntityExistsError

If the update includes a change of name or scope and a secret with the same name already exists in the same scope.

RuntimeError

If the HashiCorp Vault API returns an unexpected error.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that records analytics around the tracked call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: an analytics
        # failure must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pick up metadata from the first tracked model argument.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A tracked model in the result overrides any argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
HashiCorpVaultSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

HashiCorp Vault secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

vault_addr str

The url of the Vault server. If not set, the value will be loaded from the VAULT_ADDR environment variable, if configured.

vault_token Optional[pydantic.types.SecretStr]

The token used to authenticate with the Vault server. If not set, the token will be loaded from the VAULT_TOKEN environment variable or from the ~/.vault-token file, if configured.

vault_namespace Optional[str]

The Vault Enterprise namespace.

mount_point Optional[str]

The mount point to use for all secrets.

max_versions int

The maximum number of secret versions to keep.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
class HashiCorpVaultSecretsStoreConfiguration(SecretsStoreConfiguration):
    """HashiCorp Vault secrets store configuration.

    Attributes:
        type: The type of the store.
        vault_addr: The url of the Vault server. If not set, the value will be
            loaded from the VAULT_ADDR environment variable, if configured.
        vault_token: The token used to authenticate with the Vault server. If
            not set, the token will be loaded from the VAULT_TOKEN environment
            variable or from the ~/.vault-token file, if configured.
        vault_namespace: The Vault Enterprise namespace.
        mount_point: The mount point to use for all secrets.
        max_versions: The maximum number of secret versions to keep.
    """

    # Identifies this configuration as a HashiCorp Vault secrets store.
    type: SecretsStoreType = SecretsStoreType.HASHICORP

    # Connection settings. Only `vault_addr` is a required field here; the
    # token may instead be supplied via the environment (see class docstring).
    vault_addr: str
    vault_token: Optional[SecretStr] = None
    vault_namespace: Optional[str] = None
    mount_point: Optional[str] = None
    # Default of 1 keeps only a single version per secret — presumably to
    # avoid accumulating stale versions in Vault; confirm before changing.
    max_versions: int = 1

    class Config:
        """Pydantic configuration class."""

        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/hashicorp_secrets_store.py
class Config:
    """Pydantic model configuration."""

    # Reject any attribute that is not declared on the model.
    extra = "forbid"

rest_secrets_store

REST Secrets Store implementation.

RestSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the REST ZenML store as a backend.

This secrets store piggybacks on the REST ZenML store. It uses the same REST client configuration as the REST ZenML store.

Attributes:

Name Type Description
config

The configuration of the REST secrets store.

TYPE

The type of the store.

CONFIG_TYPE

The type of the store configuration.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
class RestSecretsStore(BaseSecretsStore):
    """Secrets store backed by the REST ZenML store.

    This secrets store piggybacks on the REST ZenML store: every operation is
    delegated to the owning store's REST client, using that store's client
    configuration.

    Attributes:
        config: The configuration of the REST secrets store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
    """

    config: RestSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.REST
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = RestSecretsStoreConfiguration

    def __init__(
        self,
        zen_store: "BaseZenStore",
        **kwargs: Any,
    ) -> None:
        """Create and initialize the REST secrets store.

        Args:
            zen_store: The ZenML store that owns this REST secrets store.
            **kwargs: Additional keyword arguments to pass to the Pydantic
                constructor.

        Raises:
            IllegalOperationError: If the ZenML store to which this secrets
                store belongs is not a REST ZenML store.
        """
        from zenml.zen_stores.rest_zen_store import RestZenStore

        # This store delegates every call to the REST client of its owning
        # store, so only a REST ZenML store can back it.
        if not isinstance(zen_store, RestZenStore):
            raise IllegalOperationError(
                "The REST secrets store can only be used with the REST "
                "ZenML store."
            )
        super().__init__(zen_store, **kwargs)

    @property
    def zen_store(self) -> "RestZenStore":
        """The ZenML store that this REST secrets store is using as a back-end.

        Returns:
            The ZenML store that this REST secrets store is using as a back-end.

        Raises:
            ValueError: If the store is not initialized.
        """
        from zenml.zen_stores.rest_zen_store import RestZenStore

        store = self._zen_store
        if not store:
            raise ValueError("Store not initialized")
        assert isinstance(store, RestZenStore)
        return store

    # ====================================
    # Secrets Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the secrets REST store."""
        logger.debug("Initializing RestSecretsStore")

        # Nothing else to do here: the REST ZenML store back-end takes care
        # of its own initialization.

    # ------
    # Secrets
    # ------

    @track(AnalyticsEvent.CREATED_SECRET)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.
        """
        return self.zen_store._create_workspace_scoped_resource(
            response_model=SecretResponseModel,
            resource=secret,
            route=SECRETS,
        )

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.
        """
        return self.zen_store._get_resource(
            response_model=SecretResponseModel,
            resource_id=secret_id,
            route=SECRETS,
        )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret` individually with each secret.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A sorted, paginated page of all secrets matching the filter
            criteria. The returned secrets contain only metadata and no
            secret values.
        """
        # Filtering, sorting and pagination are handled server-side by the
        # REST ZenML store.
        return self.zen_store._list_paginated_resources(
            filter_model=secret_filter_model,
            response_model=SecretResponseModel,
            route=SECRETS,
        )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Updates a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.
        """
        return self.zen_store._update_resource(
            # The default endpoint behavior is to replace all secret values
            # with the values in the update; `patch_values` requests the
            # merge semantics described in the docstring instead.
            params={"patch_values": True},
            resource_id=secret_id,
            resource_update=secret_update,
            response_model=SecretResponseModel,
            route=SECRETS,
        )

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The id of the secret to delete.
        """
        self.zen_store._delete_resource(
            resource_id=secret_id,
            route=SECRETS,
        )
zen_store: RestZenStore property readonly

The ZenML store that this REST secrets store is using as a back-end.

Returns:

Type Description
RestZenStore

The ZenML store that this REST secrets store is using as a back-end.

Exceptions:

Type Description
ValueError

If the store is not initialized.

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

REST secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
class RestSecretsStoreConfiguration(SecretsStoreConfiguration):
    """REST secrets store configuration.

    Attributes:
        type: The type of the store.
    """

    # Identifies this configuration as a REST secrets store.
    type: SecretsStoreType = SecretsStoreType.REST

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
class Config:
    """Pydantic model configuration."""

    # Assignment validation is deliberately disabled: the certificate
    # attributes may be replaced in place with the contents of the
    # certificate files after construction.
    validate_assignment = False
    # Reject any attribute that is not declared on the model.
    extra = "forbid"
__init__(self, zen_store, **kwargs) special

Create and initialize the REST secrets store.

Parameters:

Name Type Description Default
zen_store BaseZenStore

The ZenML store that owns this REST secrets store.

required
**kwargs Any

Additional keyword arguments to pass to the Pydantic constructor.

{}

Exceptions:

Type Description
IllegalOperationError

If the ZenML store to which this secrets store belongs is not a REST ZenML store.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def __init__(
    self,
    zen_store: "BaseZenStore",
    **kwargs: Any,
) -> None:
    """Create and initialize the REST secrets store.

    Args:
        zen_store: The ZenML store that owns this REST secrets store.
        **kwargs: Additional keyword arguments to pass to the Pydantic
            constructor.

    Raises:
        IllegalOperationError: If the ZenML store to which this secrets
            store belongs is not a REST ZenML store.
    """
    from zenml.zen_stores.rest_zen_store import RestZenStore

    # This store delegates every call to the REST client of its owning
    # store, so only a REST ZenML store can back it.
    if not isinstance(zen_store, RestZenStore):
        raise IllegalOperationError(
            "The REST secrets store can only be used with the REST "
            "ZenML store."
        )
    super().__init__(zen_store, **kwargs)
create_secret(*args, **kwargs)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The newly created secret.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that records analytics around the tracked call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: an analytics
        # failure must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pick up metadata from the first tracked model argument.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A tracked model in the result overrides any argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The id of the secret to delete.

required
Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that records analytics around the tracked call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: an analytics
        # failure must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pick up metadata from the first tracked model argument.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A tracked model in the result overrides any argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.
    """
    # Fetching is delegated entirely to the REST ZenML store back-end.
    return self.zen_store._get_resource(
        response_model=SecretResponseModel,
        resource_id=secret_id,
        route=SECRETS,
    )
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Note that returned secrets do not include any secret values. To fetch
    the secret values, use `get_secret` individually with each secret.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A sorted, paginated page of all secrets matching the filter
        criteria. The returned secrets contain only metadata and no
        secret values.
    """
    # Filtering, sorting and pagination are handled server-side by the
    # REST ZenML store.
    return self.zen_store._list_paginated_resources(
        filter_model=secret_filter_model,
        response_model=SecretResponseModel,
        route=SECRETS,
    )
update_secret(*args, **kwargs)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to be updated.

required
secret_update

The update to be applied.

required

Returns:

Type Description
Any

The updated secret.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrapper that records analytics around the tracked call.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is strictly best-effort: an analytics
        # failure must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Pick up metadata from the first tracked model argument.
            for candidate in list(args) + list(kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A tracked model in the result overrides any argument metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
RestSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

REST secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
class RestSecretsStoreConfiguration(SecretsStoreConfiguration):
    """REST secrets store configuration.

    Attributes:
        type: The type of the store.
    """

    # Identifies this configuration as a REST secrets store.
    type: SecretsStoreType = SecretsStoreType.REST

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/rest_secrets_store.py
class Config:
    """Pydantic model configuration."""

    # Assignment validation is deliberately disabled: the certificate
    # attributes may be replaced in place with the contents of the
    # certificate files after construction.
    validate_assignment = False
    # Reject any attribute that is not declared on the model.
    extra = "forbid"

secrets_store_interface

ZenML secrets store interface.

SecretsStoreInterface (ABC)

ZenML secrets store interface.

All ZenML secrets stores must implement the methods in this interface.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
class SecretsStoreInterface(ABC):
    """ZenML secrets store interface.

    All ZenML secrets stores must implement the methods in this interface.
    """

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    @abstractmethod
    def _initialize(self) -> None:
        """Initialize the secrets store.

        This method is called immediately after the secrets store is created.
        It should be used to set up the backend (database, connection etc.).
        """

    # ---------
    # Secrets
    # ---------

    @abstractmethod
    def create_secret(
        self,
        secret: SecretRequestModel,
    ) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.

        Raises:
            KeyError: If the user or workspace does not exist.
            EntityExistsError: If a secret with the same name already exists in
                the same scope.
            ValueError: If the secret is invalid.
        """

    @abstractmethod
    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: If the secret does not exist.
        """

    @abstractmethod
    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.
        """

    @abstractmethod
    def update_secret(
        self,
        secret_id: UUID,
        secret_update: SecretUpdateModel,
    ) -> SecretResponseModel:
        """Updates a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.

        Raises:
            KeyError: If the secret does not exist.
            EntityExistsError: If a secret with the same name already exists in
                the same scope.
            ValueError: If the secret is invalid.
        """

    @abstractmethod
    def delete_secret(self, secret_id: UUID) -> None:
        """Deletes a secret.

        Args:
            secret_id: The ID of the secret to delete.

        Raises:
            KeyError: If the secret does not exist.
        """
create_secret(self, secret)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret SecretRequestModel

The secret to create.

required

Returns:

Type Description
SecretResponseModel

The newly created secret.

Exceptions:

Type Description
KeyError

if the user or workspace does not exist.

EntityExistsError

If a secret with the same name already exists in the same scope.

ValueError

if the secret is invalid.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
@abstractmethod
def create_secret(
    self,
    secret: SecretRequestModel,
) -> SecretResponseModel:
    """Creates a new secret.

    The new secret is also validated against the scoping rules enforced in
    the secrets store:

      - only one workspace-scoped secret with the given name can exist
        in the target workspace.
      - only one user-scoped secret with the given name can exist in the
        target workspace for the target user.

    To modify an existing secret, use `update_secret` instead.

    Args:
        secret: The secret to create.

    Returns:
        The newly created secret.

    Raises:
        KeyError: if the user or workspace does not exist.
        EntityExistsError: If a secret with the same name already exists in
            the same scope.
        ValueError: if the secret is invalid.
    """
delete_secret(self, secret_id)

Deletes a secret.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to delete.

required

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
@abstractmethod
def delete_secret(self, secret_id: UUID) -> None:
    """Deletes a secret.

    Implementations are expected to raise rather than silently succeed
    when the given ID is unknown.

    Args:
        secret_id: The ID of the secret to delete.

    Raises:
        KeyError: if the secret doesn't exist.
    """
get_secret(self, secret_id)

Get a secret by its unique ID.

Parameters:

Name Type Description Default
secret_id UUID

ID of the secret.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

if the secret does not exist.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
@abstractmethod
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by its unique ID.

    Args:
        secret_id: ID of the secret.

    Returns:
        The secret.

    Raises:
        KeyError: if the secret does not exist.
    """
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
@abstractmethod
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Note that returned secrets do not include any secret values. To fetch
    the secret values, use `get_secret`.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all secrets matching the filter criteria, with pagination
        information and sorted according to the filter criteria. The
        returned secrets do not include any secret values, only metadata. To
        fetch the secret values, use `get_secret` individually with each
        secret.
    """
update_secret(self, secret_id, secret_update)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to be updated.

required
secret_update SecretUpdateModel

The update to be applied.

required

Returns:

Type Description
SecretResponseModel

The updated secret.

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

EntityExistsError

If a secret with the same name already exists in the same scope.

ValueError

if the secret is invalid.

Source code in zenml/zen_stores/secrets_stores/secrets_store_interface.py
@abstractmethod
def update_secret(
    self,
    secret_id: UUID,
    secret_update: SecretUpdateModel,
) -> SecretResponseModel:
    """Updates a secret.

    Secret values that are specified as `None` in the update that are
    present in the existing secret are removed from the existing secret.
    Values that are present in both secrets are overwritten. All other
    values in both the existing secret and the update are kept (merged).
    In other words, the update is applied as a partial merge rather than a
    full replacement.

    If the update includes a change of name or scope, the scoping rules
    enforced in the secrets store are used to validate the update:

      - only one workspace-scoped secret with the given name can exist
        in the target workspace.
      - only one user-scoped secret with the given name can exist in the
        target workspace for the target user.

    Args:
        secret_id: The ID of the secret to be updated.
        secret_update: The update to be applied.

    Returns:
        The updated secret.

    Raises:
        KeyError: if the secret doesn't exist.
        EntityExistsError: If a secret with the same name already exists in
            the same scope.
        ValueError: if the secret is invalid.
    """

sql_secrets_store

SQL Secrets Store implementation.

SqlSecretsStore (BaseSecretsStore) pydantic-model

Secrets store implementation that uses the SQL ZenML store as a backend.

This secrets store piggybacks on the SQL ZenML store. It uses the same database and configuration as the SQL ZenML store.

Attributes:

Name Type Description
config

The configuration of the SQL secrets store.

TYPE

The type of the store.

CONFIG_TYPE

The type of the store configuration.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
class SqlSecretsStore(BaseSecretsStore):
    """Secrets store implementation that uses the SQL ZenML store as a backend.

    This secrets store piggybacks on the SQL ZenML store. It uses the same
    database and configuration as the SQL ZenML store.

    Attributes:
        config: The configuration of the SQL secrets store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
    """

    config: SqlSecretsStoreConfiguration
    TYPE: ClassVar[SecretsStoreType] = SecretsStoreType.SQL
    CONFIG_TYPE: ClassVar[
        Type[SecretsStoreConfiguration]
    ] = SqlSecretsStoreConfiguration

    # Set up in `_initialize` only when `config.encryption_key` is present;
    # while it is `None`, values are written without encryption.
    _encryption_engine: Optional[AesGcmEngine] = None

    def __init__(
        self,
        zen_store: "BaseZenStore",
        **kwargs: Any,
    ) -> None:
        """Create and initialize the SQL secrets store.

        Args:
            zen_store: The ZenML store that owns this SQL secrets store.
            **kwargs: Additional keyword arguments to pass to the Pydantic
                constructor.

        Raises:
            IllegalOperationError: If the ZenML store to which this secrets
                store belongs is not a SQL ZenML store.
        """
        # NOTE(review): imported locally, presumably to avoid a circular
        # module dependency with sql_zen_store.
        from zenml.zen_stores.sql_zen_store import SqlZenStore

        if not isinstance(zen_store, SqlZenStore):
            raise IllegalOperationError(
                "The SQL secrets store can only be used with the SQL ZenML "
                "store."
            )
        super().__init__(zen_store, **kwargs)

    @property
    def engine(self) -> Engine:
        """The SQLAlchemy engine.

        Returns:
            The SQLAlchemy engine.
        """
        # Reuses the owning SQL ZenML store's engine (shared database).
        return self.zen_store.engine

    @property
    def zen_store(self) -> "SqlZenStore":
        """The ZenML store that this SQL secrets store is using as a back-end.

        Returns:
            The ZenML store that this SQL secrets store is using as a back-end.

        Raises:
            ValueError: If the store is not initialized.
        """
        from zenml.zen_stores.sql_zen_store import SqlZenStore

        if not self._zen_store:
            raise ValueError("Store not initialized")
        # Narrow the type for callers; `__init__` already rejected any
        # non-SQL ZenML store.
        assert isinstance(self._zen_store, SqlZenStore)
        return self._zen_store

    # ====================================
    # Secrets Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the secrets SQL store."""
        logger.debug("Initializing SqlSecretsStore")

        # Initialize the encryption engine
        if self.config.encryption_key:
            self._encryption_engine = AesGcmEngine()
            self._encryption_engine._update_key(self.config.encryption_key)

        # Nothing else to do here, the SQL ZenML store back-end is already
        # initialized

    # ------
    # Secrets
    # ------

    def _check_sql_secret_scope(
        self,
        session: Session,
        secret_name: str,
        scope: SecretScope,
        workspace: UUID,
        user: UUID,
        exclude_secret_id: Optional[UUID] = None,
    ) -> Tuple[bool, str]:
        """Checks if a secret with the given name already exists in the given scope.

        This method enforces the following scope rules:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            session: The SQLAlchemy session.
            secret_name: The name of the secret.
            scope: The scope of the secret.
            workspace: The ID of the workspace to which the secret belongs.
            user: The ID of the user to which the secret belongs.
            exclude_secret_id: The ID of a secret to exclude from the check
                (used e.g. during an update to exclude the existing secret).

        Returns:
            True if a secret with the given name already exists in the given
            scope, False otherwise, and an error message.
        """
        # Base filter: same name and same scope kind.
        scope_filter = (
            select(SecretSchema)
            .where(SecretSchema.name == secret_name)
            .where(SecretSchema.scope == scope.value)
        )

        # Workspace- and user-scoped secrets only clash within the same
        # workspace (and, for user scope, the same user).
        if scope in [SecretScope.WORKSPACE, SecretScope.USER]:
            scope_filter = scope_filter.where(
                SecretSchema.workspace_id == workspace
            )
        if scope == SecretScope.USER:
            scope_filter = scope_filter.where(SecretSchema.user_id == user)
        # During an update, the secret being updated must not match itself.
        if exclude_secret_id is not None:
            scope_filter = scope_filter.where(
                SecretSchema.id != exclude_secret_id
            )

        existing_secret = session.exec(scope_filter).first()

        if existing_secret is not None:
            # Convert to a model only to resolve the workspace/user names
            # used in the error message below.
            existing_secret_model = existing_secret.to_model(
                encryption_engine=self._encryption_engine
            )

            msg = (
                f"Found an existing {scope.value} scoped secret with the "
                f"same '{secret_name}' name"
            )
            if scope in [SecretScope.WORKSPACE, SecretScope.USER]:
                msg += (
                    f" in the same '{existing_secret_model.workspace.name}' "
                    f"workspace"
                )
            if scope == SecretScope.USER:
                assert existing_secret_model.user
                msg += (
                    f" for the same '{existing_secret_model.user.name}' user"
                )

            return True, msg

        return False, ""

    @track(AnalyticsEvent.CREATED_SECRET, v2=True)
    def create_secret(self, secret: SecretRequestModel) -> SecretResponseModel:
        """Creates a new secret.

        The new secret is also validated against the scoping rules enforced in
        the secrets store:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret: The secret to create.

        Returns:
            The newly created secret.

        Raises:
            EntityExistsError: If a secret with the same name already exists in
                the same scope.
        """
        with Session(self.engine) as session:
            # Check if a secret with the same name already exists in the same
            # scope.
            secret_exists, msg = self._check_sql_secret_scope(
                session=session,
                secret_name=secret.name,
                scope=secret.scope,
                workspace=secret.workspace,
                user=secret.user,
            )
            if secret_exists:
                raise EntityExistsError(msg)

            # Values are encrypted here if an encryption engine is configured.
            new_secret = SecretSchema.from_request(
                secret, encryption_engine=self._encryption_engine
            )
            session.add(new_secret)
            session.commit()

            return new_secret.to_model(
                encryption_engine=self._encryption_engine
            )

    def get_secret(self, secret_id: UUID) -> SecretResponseModel:
        """Get a secret by ID.

        Args:
            secret_id: The ID of the secret to fetch.

        Returns:
            The secret.

        Raises:
            KeyError: if the secret doesn't exist.
        """
        with Session(self.engine) as session:
            secret_in_db = session.exec(
                select(SecretSchema).where(SecretSchema.id == secret_id)
            ).first()
            if secret_in_db is None:
                raise KeyError(f"Secret with ID {secret_id} not found.")
            return secret_in_db.to_model(
                encryption_engine=self._encryption_engine
            )

    def list_secrets(
        self, secret_filter_model: SecretFilterModel
    ) -> Page[SecretResponseModel]:
        """List all secrets matching the given filter criteria.

        Note that returned secrets do not include any secret values. To fetch
        the secret values, use `get_secret`.

        Args:
            secret_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all secrets matching the filter criteria, with pagination
            information and sorted according to the filter criteria. The
            returned secrets do not include any secret values, only metadata. To
            fetch the secret values, use `get_secret` individually with each
            secret.
        """
        with Session(self.engine) as session:
            query = select(SecretSchema)
            return self.zen_store.filter_and_paginate(
                session=session,
                query=query,
                table=SecretSchema,
                filter_model=secret_filter_model,
                # Strip secret values; the listing only exposes metadata.
                custom_schema_to_model_conversion=lambda secret: secret.to_model(
                    include_values=False
                ),
            )

    @track(AnalyticsEvent.UPDATED_SECRET)
    def update_secret(
        self, secret_id: UUID, secret_update: SecretUpdateModel
    ) -> SecretResponseModel:
        """Updates a secret.

        Secret values that are specified as `None` in the update that are
        present in the existing secret are removed from the existing secret.
        Values that are present in both secrets are overwritten. All other
        values in both the existing secret and the update are kept (merged).

        If the update includes a change of name or scope, the scoping rules
        enforced in the secrets store are used to validate the update:

          - only one workspace-scoped secret with the given name can exist
            in the target workspace.
          - only one user-scoped secret with the given name can exist in the
            target workspace for the target user.

        Args:
            secret_id: The ID of the secret to be updated.
            secret_update: The update to be applied.

        Returns:
            The updated secret.

        Raises:
            KeyError: if the secret doesn't exist.
            EntityExistsError: If a secret with the same name already exists in
                the same scope.
        """
        with Session(self.engine) as session:
            existing_secret = session.exec(
                select(SecretSchema).where(SecretSchema.id == secret_id)
            ).first()

            if not existing_secret:
                raise KeyError(f"Secret with ID {secret_id} not found.")

            # Prevent changes to the secret's user or workspace
            self._validate_user_and_workspace_update(
                secret_update=secret_update,
                current_user=existing_secret.user.id,
                current_workspace=existing_secret.workspace.id,
            )

            # A change in name or scope requires a check of the scoping rules.
            # NOTE: `and` binds tighter than `or`, so this reads as
            # (name provided and changed) or (scope provided and changed).
            if (
                secret_update.name is not None
                and existing_secret.name != secret_update.name
                or secret_update.scope is not None
                and existing_secret.scope != secret_update.scope
            ):
                secret_exists, msg = self._check_sql_secret_scope(
                    session=session,
                    secret_name=secret_update.name or existing_secret.name,
                    scope=secret_update.scope
                    or SecretScope(existing_secret.scope),
                    workspace=secret_update.workspace
                    or existing_secret.workspace.id,
                    user=secret_update.user or existing_secret.user.id,
                    # Don't let the secret collide with itself.
                    exclude_secret_id=secret_id,
                )

                if secret_exists:
                    raise EntityExistsError(msg)

            existing_secret.update(
                secret_update=secret_update,
                encryption_engine=self._encryption_engine,
            )
            session.add(existing_secret)
            session.commit()

            # Refresh the Model that was just created
            session.refresh(existing_secret)
            return existing_secret.to_model(
                encryption_engine=self._encryption_engine
            )

    @track(AnalyticsEvent.DELETED_SECRET)
    def delete_secret(self, secret_id: UUID) -> None:
        """Delete a secret.

        Args:
            secret_id: The id of the secret to delete.

        Raises:
            KeyError: if the secret doesn't exist.
        """
        with Session(self.engine) as session:
            try:
                # `.one()` raises NoResultFound when no row matches, which is
                # translated into the documented KeyError below.
                secret_in_db = session.exec(
                    select(SecretSchema).where(SecretSchema.id == secret_id)
                ).one()
                session.delete(secret_in_db)
                session.commit()
            except NoResultFound:
                raise KeyError(f"Secret with ID {secret_id} not found.")
engine: Engine property readonly

The SQLAlchemy engine.

Returns:

Type Description
Engine

The SQLAlchemy engine.

zen_store: SqlZenStore property readonly

The ZenML store that this SQL secrets store is using as a back-end.

Returns:

Type Description
SqlZenStore

The ZenML store that this SQL secrets store is using as a back-end.

Exceptions:

Type Description
ValueError

If the store is not initialized.

CONFIG_TYPE (SecretsStoreConfiguration) pydantic-model

SQL secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

encryption_key Optional[str]

The encryption key to use for the SQL secrets store. If not set, the passwords will not be encrypted in the database.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
class SqlSecretsStoreConfiguration(SecretsStoreConfiguration):
    """SQL secrets store configuration.

    Attributes:
        type: The type of the store.
        encryption_key: The encryption key to use for the SQL secrets store.
            If not set, the passwords will not be encrypted in the database.
    """

    # Fixed store-type discriminator for this configuration class.
    type: SecretsStoreType = SecretsStoreType.SQL
    # Optional key consumed by the AesGcmEngine in SqlSecretsStore; when
    # absent, secret values are stored unencrypted.
    encryption_key: Optional[str] = None

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
class Config:
    """Pydantic configuration class.

    Controls pydantic model behavior for the secrets store configuration.
    """

    # Don't validate attributes when assigning them. This is necessary
    # because the certificate attributes can be expanded to the contents
    # of the certificate files.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"
__init__(self, zen_store, **kwargs) special

Create and initialize the SQL secrets store.

Parameters:

Name Type Description Default
zen_store BaseZenStore

The ZenML store that owns this SQL secrets store.

required
**kwargs Any

Additional keyword arguments to pass to the Pydantic constructor.

{}

Exceptions:

Type Description
IllegalOperationError

If the ZenML store to which this secrets store belongs is not a SQL ZenML store.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def __init__(
    self,
    zen_store: "BaseZenStore",
    **kwargs: Any,
) -> None:
    """Create and initialize the SQL secrets store.

    Args:
        zen_store: The ZenML store that owns this SQL secrets store.
        **kwargs: Additional keyword arguments to pass to the Pydantic
            constructor.

    Raises:
        IllegalOperationError: If the ZenML store to which this secrets
            store belongs is not a SQL ZenML store.
    """
    # NOTE(review): imported locally, presumably to avoid a circular
    # module dependency with sql_zen_store.
    from zenml.zen_stores.sql_zen_store import SqlZenStore

    if isinstance(zen_store, SqlZenStore):
        super().__init__(zen_store, **kwargs)
        return
    raise IllegalOperationError(
        "The SQL secrets store can only be used with the SQL ZenML "
        "store."
    )
create_secret(*args, **kwargs)

Creates a new secret.

The new secret is also validated against the scoping rules enforced in the secrets store:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret

The secret to create.

required

Returns:

Type Description
Any

The newly created secret.

Exceptions:

Type Description
EntityExistsError

If a secret with the same name already exists in the same scope.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `func`, `event`, `v1` and `v2` are free variables closed
    # over from the enclosing `track` decorator; this body is not standalone.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # Bind the tracker when the wrapped callable is a method on an
            # AnalyticsTrackerMixin instance (e.g. a ZenML store).
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the source
            # of analytics metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_secret(*args, **kwargs)

Delete a secret.

Parameters:

Name Type Description Default
secret_id

The id of the secret to delete.

required

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `func`, `event`, `v1` and `v2` are free variables closed
    # over from the enclosing `track` decorator; this body is not standalone.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # Bind the tracker when the wrapped callable is a method on an
            # AnalyticsTrackerMixin instance (e.g. a ZenML store).
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the source
            # of analytics metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
get_secret(self, secret_id)

Get a secret by ID.

Parameters:

Name Type Description Default
secret_id UUID

The ID of the secret to fetch.

required

Returns:

Type Description
SecretResponseModel

The secret.

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def get_secret(self, secret_id: UUID) -> SecretResponseModel:
    """Get a secret by ID.

    Passes the configured encryption engine (if any) to the schema
    conversion so stored values can be decrypted.

    Args:
        secret_id: The ID of the secret to fetch.

    Returns:
        The secret.

    Raises:
        KeyError: if the secret doesn't exist.
    """
    with Session(self.engine) as session:
        row = session.exec(
            select(SecretSchema).where(SecretSchema.id == secret_id)
        ).first()
        if row is None:
            raise KeyError(f"Secret with ID {secret_id} not found.")
        # Convert while the row is still attached to the session.
        return row.to_model(encryption_engine=self._encryption_engine)
list_secrets(self, secret_filter_model)

List all secrets matching the given filter criteria.

Note that returned secrets do not include any secret values. To fetch the secret values, use get_secret.

Parameters:

Name Type Description Default
secret_filter_model SecretFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[SecretResponseModel]

A list of all secrets matching the filter criteria, with pagination information and sorted according to the filter criteria. The returned secrets do not include any secret values, only metadata. To fetch the secret values, use get_secret individually with each secret.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def list_secrets(
    self, secret_filter_model: SecretFilterModel
) -> Page[SecretResponseModel]:
    """List all secrets matching the given filter criteria.

    Note that returned secrets do not include any secret values. To fetch
    the secret values, use `get_secret`.

    Args:
        secret_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all secrets matching the filter criteria, with pagination
        information and sorted according to the filter criteria. The
        returned secrets do not include any secret values, only metadata. To
        fetch the secret values, use `get_secret` individually with each
        secret.
    """

    def _to_metadata_model(secret: SecretSchema) -> SecretResponseModel:
        # Strip secret values from the response; listings expose metadata only.
        return secret.to_model(include_values=False)

    with Session(self.engine) as session:
        return self.zen_store.filter_and_paginate(
            session=session,
            query=select(SecretSchema),
            table=SecretSchema,
            filter_model=secret_filter_model,
            custom_schema_to_model_conversion=_to_metadata_model,
        )
update_secret(*args, **kwargs)

Updates a secret.

Secret values that are specified as None in the update that are present in the existing secret are removed from the existing secret. Values that are present in both secrets are overwritten. All other values in both the existing secret and the update are kept (merged).

If the update includes a change of name or scope, the scoping rules enforced in the secrets store are used to validate the update:

  • only one workspace-scoped secret with the given name can exist in the target workspace.
  • only one user-scoped secret with the given name can exist in the target workspace for the target user.

Parameters:

Name Type Description Default
secret_id

The ID of the secret to be updated.

required
secret_update

The update to be applied.

required

Returns:

Type Description
Any

The updated secret.

Exceptions:

Type Description
KeyError

if the secret doesn't exist.

EntityExistsError

If a secret with the same name already exists in the same scope.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    # NOTE(review): `func`, `event`, `v1` and `v2` are free variables closed
    # over from the enclosing `track` decorator; this body is not standalone.
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        try:
            # Bind the tracker when the wrapped callable is a method on an
            # AnalyticsTrackerMixin instance (e.g. a ZenML store).
            if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Use the first tracked model among the arguments as the source
            # of analytics metadata.
            for obj in list(args) + list(kwargs.values()):
                if isinstance(obj, AnalyticsTrackedModelMixin):
                    handler.metadata = obj.get_analytics_metadata()
                    break
        except Exception as e:
            # Analytics failures must never break the wrapped call.
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            # Prefer metadata from the result, if it is a tracked model.
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
SqlSecretsStoreConfiguration (SecretsStoreConfiguration) pydantic-model

SQL secrets store configuration.

Attributes:

Name Type Description
type SecretsStoreType

The type of the store.

encryption_key Optional[str]

The encryption key to use for the SQL secrets store. If not set, the passwords will not be encrypted in the database.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
class SqlSecretsStoreConfiguration(SecretsStoreConfiguration):
    """SQL secrets store configuration.

    Attributes:
        type: The type of the store.
        encryption_key: The encryption key to use for the SQL secrets store.
            If not set, the passwords will not be encrypted in the database.
    """

    # Fixed store-type discriminator for this configuration class.
    type: SecretsStoreType = SecretsStoreType.SQL
    # Optional key consumed by the AesGcmEngine in SqlSecretsStore; when
    # absent, secret values are stored unencrypted.
    encryption_key: Optional[str] = None

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/secrets_stores/sql_secrets_store.py
class Config:
    """Pydantic configuration class."""

    # Don't validate attributes when assigning them. This is necessary
    # because the certificate attributes can be expanded to the contents
    # of the certificate files, which would not pass validation.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"

sql_zen_store

SQL Zen Store implementation.

SQLDatabaseDriver (StrEnum)

SQL database drivers supported by the SQL ZenML store.

Source code in zenml/zen_stores/sql_zen_store.py
class SQLDatabaseDriver(StrEnum):
    """SQL database drivers supported by the SQL ZenML store."""

    # MySQL database server backend.
    MYSQL = "mysql"
    # File-based SQLite database backend.
    SQLITE = "sqlite"

SqlZenStore (BaseZenStore) pydantic-model

Store Implementation that uses SQL database backend.

Attributes:

Name Type Description
config SqlZenStoreConfiguration

The configuration of the SQL ZenML store.

skip_migrations bool

Whether to skip migrations when initializing the store.

TYPE ClassVar[zenml.enums.StoreType]

The type of the store.

CONFIG_TYPE ClassVar[Type[zenml.config.store_config.StoreConfiguration]]

The type of the store configuration.

_engine Optional[sqlalchemy.engine.base.Engine]

The SQLAlchemy engine.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStore(BaseZenStore):
    """Store Implementation that uses SQL database backend.

    Attributes:
        config: The configuration of the SQL ZenML store.
        skip_migrations: Whether to skip migrations when initializing the store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
        _engine: The SQLAlchemy engine.
    """

    config: SqlZenStoreConfiguration
    skip_migrations: bool = False
    TYPE: ClassVar[StoreType] = StoreType.SQL
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]] = SqlZenStoreConfiguration

    # Lazily set up in `_initialize`; always access through the `engine` and
    # `alembic` properties, which raise if the store is not yet initialized.
    _engine: Optional[Engine] = None
    _alembic: Optional[Alembic] = None

    @property
    def engine(self) -> Engine:
        """The SQLAlchemy engine.

        Returns:
            The SQLAlchemy engine.

        Raises:
            ValueError: If the store is not initialized.
        """
        engine = self._engine
        if not engine:
            raise ValueError("Store not initialized")
        return engine

    @property
    def runs_inside_server(self) -> bool:
        """Whether the store is running inside a server.

        Determined by the presence of the server deployment type environment
        variable.

        Returns:
            Whether the store is running inside a server.
        """
        # Return the membership test directly instead of the redundant
        # `if ...: return True / return False` pattern.
        return ENV_ZENML_SERVER_DEPLOYMENT_TYPE in os.environ

    @property
    def alembic(self) -> Alembic:
        """The Alembic wrapper.

        Returns:
            The Alembic wrapper.

        Raises:
            ValueError: If the store is not initialized.
        """
        wrapper = self._alembic
        if not wrapper:
            raise ValueError("Store not initialized")
        return wrapper

    @classmethod
    def filter_and_paginate(
        cls,
        session: Session,
        query: Union[Select[AnySchema], SelectOfScalar[AnySchema]],
        table: Type[AnySchema],
        filter_model: BaseFilterModel,
        custom_schema_to_model_conversion: Optional[
            Callable[[AnySchema], B]
        ] = None,
        custom_fetch: Optional[
            Callable[
                [
                    Session,
                    Union[Select[AnySchema], SelectOfScalar[AnySchema]],
                    BaseFilterModel,
                ],
                List[AnySchema],
            ]
        ] = None,
    ) -> Page[B]:
        """Given a query, return a Page instance with a list of filtered Models.

        Args:
            session: The SQLModel Session
            query: The query to execute
            table: The table to select from
            filter_model: The filter to use, including pagination and sorting
            custom_schema_to_model_conversion: Callable to convert the schema
                into a model. This is used if the Model contains additional
                data that is not explicitly stored as a field or relationship
                on the model.
            custom_fetch: Custom callable to use to fetch items from the
                database for a given query. This is used if the items fetched
                from the database need to be processed differently (e.g. to
                perform additional filtering). The callable should take a
                `Session`, a `Select` query and a `BaseFilterModel` filter as
                arguments and return a `List` of items.

        Returns:
            The Domain Model representation of the DB resource

        Raises:
            ValueError: if the filtered page number is out of bounds.
            RuntimeError: if the schema does not have a `to_model` method.
        """
        query = filter_model.apply_filter(query=query, table=table)

        # Without a custom fetcher, the total item count can be computed
        # cheaply in the database before any ordering is applied.
        if custom_fetch is None:
            total = session.scalar(
                select([func.count("*")]).select_from(
                    query.options(noload("*")).subquery()
                )
            )

        # Sorting
        column, operand = filter_model.sorting_params
        if operand == SorterOps.DESCENDING:
            query = query.order_by(desc(getattr(table, column)))
        else:
            query = query.order_by(asc(getattr(table, column)))

        # With a custom fetcher, fetch the (already sorted) items exactly once
        # and derive both the total count and the page slice from the result.
        # The previous implementation called `custom_fetch` twice — once for
        # the count and once for the page — doubling the query cost.
        custom_items: Optional[List[AnySchema]] = None
        if custom_fetch is not None:
            custom_items = custom_fetch(session, query, filter_model)
            total = len(custom_items)

        # Get the total amount of pages in the database for a given query
        if total == 0:
            total_pages = 1
        else:
            total_pages = math.ceil(total / filter_model.size)

        if filter_model.page > total_pages:
            raise ValueError(
                f"Invalid page {filter_model.page}. The requested page size is "
                f"{filter_model.size} and there are a total of {total} items "
                f"for this query. The maximum page value therefore is "
                f"{total_pages}."
            )

        # Get a page of the actual data
        item_schemas: List[AnySchema]
        if custom_items is not None:
            # select the items in the current page
            item_schemas = custom_items[
                filter_model.offset : filter_model.offset + filter_model.size
            ]
        else:
            item_schemas = (
                session.exec(
                    query.limit(filter_model.size).offset(filter_model.offset)
                )
                .unique()
                .all()
            )

        # Convert this page of items from schemas to models.
        items: List[B] = []
        for schema in item_schemas:
            # If a custom conversion function is provided, use it.
            if custom_schema_to_model_conversion:
                items.append(custom_schema_to_model_conversion(schema))
                continue
            # Otherwise, try to use the `to_model` method of the schema.
            to_model = getattr(schema, "to_model", None)
            if callable(to_model):
                items.append(to_model())
                continue
            # If neither of the above work, raise an error.
            raise RuntimeError(
                f"Cannot convert schema `{schema.__class__.__name__}` to model "
                "since it does not have a `to_model` method."
            )

        return Page(
            total=total,
            total_pages=total_pages,
            items=items,
            index=filter_model.page,
            max_size=filter_model.size,
        )

    # ====================================
    # ZenML Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the SQL store.

        Creates the SQLAlchemy engine, ensures the database exists (SQLite
        parent directory / MySQL database creation) and runs migrations
        unless disabled.

        Raises:
            OperationalError: If connecting to the database failed.
        """
        logger.debug("Initializing SqlZenStore at %s", self.config.url)

        url, connect_args, engine_args = self.config.get_sqlmodel_config()
        self._engine = create_engine(
            url=url, connect_args=connect_args, **engine_args
        )

        # SQLite: As long as the parent directory exists, SQLAlchemy will
        # automatically create the database.
        if (
            self.config.driver == SQLDatabaseDriver.SQLITE
            and self.config.database
            and not fileio.exists(self.config.database)
        ):
            fileio.makedirs(os.path.dirname(self.config.database))

        # MySQL: We might need to create the database manually.
        # To do so, we create a new engine that connects to the `mysql` database
        # and then create the desired database.
        # See https://stackoverflow.com/a/8977109
        if (
            self.config.driver == SQLDatabaseDriver.MYSQL
            and self.config.database
        ):
            try:
                # Probe connectivity only; close the connection immediately so
                # the probe does not leak a pooled connection (the previous
                # code left it open).
                self._engine.connect().close()
            except OperationalError as e:
                logger.debug(
                    "Failed to connect to mysql database `%s`.",
                    self._engine.url.database,
                )

                if _is_mysql_missing_database_error(e):
                    self._create_mysql_database(
                        url=self._engine.url,
                        connect_args=connect_args,
                        engine_args=engine_args,
                    )
                else:
                    raise

        self._alembic = Alembic(self.engine)
        if (
            not self.skip_migrations
            and ENV_ZENML_DISABLE_DATABASE_MIGRATION not in os.environ
        ):
            self.migrate_database()

    def _create_mysql_database(
        self,
        url: URL,
        connect_args: Dict[str, Any],
        engine_args: Dict[str, Any],
    ) -> None:
        """Creates a mysql database.

        Args:
            url: The URL of the database to create.
            connect_args: Connect arguments for the SQLAlchemy engine.
            engine_args: Additional initialization arguments for the SQLAlchemy
                engine
        """
        logger.info("Trying to create database %s.", url.database)
        # Connect to the server without selecting a database, so the target
        # database can be created.
        master_url = url._replace(database=None)
        master_engine = create_engine(
            url=master_url, connect_args=connect_args, **engine_args
        )
        # NOTE: the database name comes from the store configuration (trusted
        # operator input); DDL statements cannot be parameterized.
        query = f"CREATE DATABASE IF NOT EXISTS {self.config.database}"
        # Connect *before* entering the try block: if `connect()` raises, the
        # old code's `finally` referenced an unbound `connection` name and
        # masked the real error with a NameError.
        connection = master_engine.connect()
        try:
            connection.execute(text(query))
        finally:
            connection.close()

    def migrate_database(self) -> None:
        """Migrate the database to the head as defined by the python package."""
        alembic_logger = logging.getLogger("alembic")

        # Detach every handler previously attached to the alembic logger.
        for stale_handler in list(alembic_logger.handlers):
            alembic_logger.removeHandler(stale_handler)

        # Suppress alembic info logging unless zenml itself runs at debug
        # level.
        if get_logging_level() == LoggingLevels.DEBUG:
            alembic_logger.setLevel(logging.DEBUG)
        else:
            alembic_logger.setLevel(logging.WARNING)

        alembic_logger.addHandler(get_console_handler())

        # Three distinct situations must be handled:
        # 1. the database is completely empty (not initialized)
        # 2. the database is not empty, but has never been migrated with
        #    alembic before (i.e. was created with SQLModel back when alembic
        #    wasn't used)
        # 3. the database is not empty and has been migrated with alembic
        #    before
        revisions_before = self.alembic.current_revisions()
        if revisions_before:
            if len(revisions_before) > 1:
                logger.warning(
                    "The ZenML database has more than one migration head "
                    "revision. This is not expected and might indicate a "
                    "database migration problem. Please raise an issue on "
                    "GitHub if you encounter this."
                )
            # Case 3: the database has been migrated with alembic before. Just
            # upgrade to the latest revision.
            self.alembic.upgrade()
        elif self.alembic.db_is_empty():
            # Case 1: the database is empty. We can just create the
            # tables from scratch with alembic.
            self.alembic.upgrade()
        else:
            # Case 2: the database is not empty, but has never been
            # migrated with alembic before. We need to create the alembic
            # version table, initialize it with the first revision where we
            # introduced alembic and then upgrade to the latest revision.
            self.alembic.stamp(ZENML_ALEMBIC_START_REVISION)
            self.alembic.upgrade()

        # If an alembic migration took place, all non-custom flavors are purged
        # and the FlavorRegistry recreates all in-built and integration
        # flavors in the db.
        if revisions_before != self.alembic.current_revisions():
            self._sync_flavors()

    def _sync_flavors(self) -> None:
        """Purge all in-built and integration flavors from the DB and sync."""
        registry = FlavorRegistry()
        registry.register_flavors(store=self)

    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.

        Raises:
            KeyError: If the deployment ID could not be loaded from the
                database.
        """
        model = super().get_store_info()
        model.database_type = ServerDatabaseType(
            make_url(self.config.url).drivername
        )

        # The deployment ID stored in the database takes precedence over the
        # one coming from the global configuration.
        with Session(self.engine) as session:
            identity = session.exec(select(IdentitySchema)).first()
            if identity is None:
                raise KeyError(
                    "The deployment ID could not be loaded from the database."
                )
            model.id = identity.id

        return model

    # ------
    # Stacks
    # ------
    @track(AnalyticsEvent.REGISTERED_STACK, v2=True)
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Register a new stack.

        Args:
            stack: The stack to register.

        Returns:
            The registered stack.
        """
        with Session(self.engine) as session:
            self._fail_if_stack_with_name_exists_for_user(
                stack=stack, session=session
            )

            if stack.is_shared:
                self._fail_if_stack_with_name_already_shared(
                    stack=stack, session=session
                )

            # Collect the IDs of every component referenced by the stack.
            component_ids: List[UUID] = []
            if stack.components is not None:
                for id_list in stack.components.values():
                    component_ids.extend(id_list)

            # Fetch the schemas of all referenced components in one query.
            defined_components = session.exec(
                select(StackComponentSchema).where(
                    or_(
                        *(
                            StackComponentSchema.id == component_id
                            for component_id in component_ids
                        )
                    )
                )
            ).all()

            new_stack_schema = StackSchema(
                workspace_id=stack.workspace,
                user_id=stack.user,
                is_shared=stack.is_shared,
                name=stack.name,
                description=stack.description,
                components=defined_components,
            )

            session.add(new_stack_schema)
            session.commit()
            session.refresh(new_stack_schema)

            return new_stack_schema.to_model()

    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Get a stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.

        Raises:
            KeyError: if the stack doesn't exist.
        """
        with Session(self.engine) as session:
            result = session.exec(
                select(StackSchema).where(StackSchema.id == stack_id)
            ).first()
            if result is None:
                raise KeyError(f"Stack with ID {stack_id} not found.")
            return result.to_model()

    def list_stacks(
        self, stack_filter_model: StackFilterModel
    ) -> Page[StackResponseModel]:
        """List all stacks matching the given filter criteria.

        Args:
            stack_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all stacks matching the filter criteria.
        """
        with Session(self.engine) as session:
            query = select(StackSchema)
            component_id = stack_filter_model.component_id
            if component_id:
                # Restrict the result to stacks containing the component, via
                # the stack/component composition table.
                query = query.where(
                    StackCompositionSchema.stack_id == StackSchema.id,
                    StackCompositionSchema.component_id == component_id,
                )
            return self.filter_and_paginate(
                session=session,
                query=query,
                table=StackSchema,
                filter_model=stack_filter_model,
            )

    @track(AnalyticsEvent.UPDATED_STACK, v2=True)
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Update a stack.

        Args:
            stack_id: The ID of the stack update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.

        Raises:
            KeyError: if the stack doesn't exist.
            IllegalOperationError: if the stack is a default stack.
        """
        with Session(self.engine) as session:
            # Check if stack with the domain key (name, workspace, owner)
            # already exists
            existing_stack = session.exec(
                select(StackSchema).where(StackSchema.id == stack_id)
            ).first()
            if existing_stack is None:
                # Fixed: the two f-string fragments previously concatenated
                # into "Found noexisting" (missing space).
                raise KeyError(
                    f"Unable to update stack with id '{stack_id}': Found no "
                    f"existing stack with this id."
                )
            if existing_stack.name == DEFAULT_STACK_NAME:
                raise IllegalOperationError(
                    "The default stack cannot be modified."
                )

            # In case of a renaming update, make sure no stack already exists
            # with that name
            if stack_update.name and existing_stack.name != stack_update.name:
                self._fail_if_stack_with_name_exists_for_user(
                    stack=stack_update, session=session
                )

            # If the update turns a private stack into a shared one, check
            # that no stack with the same name is already shared within the
            # workspace. (The redundant re-check of `stack_update.is_shared`
            # in the nested condition was removed.)
            if stack_update.is_shared and not existing_stack.is_shared:
                self._fail_if_stack_with_name_already_shared(
                    stack=stack_update, session=session
                )

            components = []
            if stack_update.components:
                filters = [
                    (StackComponentSchema.id == component_id)
                    for list_of_component_ids in stack_update.components.values()
                    for component_id in list_of_component_ids
                ]
                components = session.exec(
                    select(StackComponentSchema).where(or_(*filters))
                ).all()

            existing_stack.update(
                stack_update=stack_update,
                components=components,
            )

            session.add(existing_stack)
            session.commit()
            session.refresh(existing_stack)

            return existing_stack.to_model()

    @track(AnalyticsEvent.DELETED_STACK)
    def delete_stack(self, stack_id: UUID) -> None:
        """Delete a stack.

        Args:
            stack_id: The ID of the stack to delete.

        Raises:
            KeyError: if the stack doesn't exist.
            IllegalOperationError: if the stack is a default stack.
        """
        with Session(self.engine) as session:
            try:
                # `.one()` raises `NoResultFound` when the stack is missing,
                # so the previous dead `if stack is None` check was removed.
                stack = session.exec(
                    select(StackSchema).where(StackSchema.id == stack_id)
                ).one()

                if stack.name == DEFAULT_STACK_NAME:
                    raise IllegalOperationError(
                        "The default stack cannot be deleted."
                    )
                session.delete(stack)
            except NoResultFound as error:
                # Give the KeyError a message instead of raising it bare.
                raise KeyError(
                    f"Stack with ID {stack_id} not found."
                ) from error

            session.commit()

    def _fail_if_stack_with_name_exists_for_user(
        self,
        stack: StackRequestModel,
        session: Session,
    ) -> None:
        """Raise an exception if the user already owns a stack with this name.

        Args:
            stack: The stack to check.
            session: The DB session to use.

        Raises:
            StackExistsError: If a stack with the given name is already
                owned by the user.
        """
        clashing_stack = session.exec(
            select(StackSchema)
            .where(StackSchema.name == stack.name)
            .where(StackSchema.workspace_id == stack.workspace)
            .where(StackSchema.user_id == stack.user)
        ).first()
        # No clash: nothing to do.
        if clashing_stack is None:
            return

        workspace = self._get_workspace_schema(
            workspace_name_or_id=stack.workspace, session=session
        )
        user = self._get_user_schema(
            user_name_or_id=stack.user, session=session
        )
        raise StackExistsError(
            f"Unable to register stack with name "
            f"'{stack.name}': Found an existing stack with the same "
            f"name in the active workspace, '{workspace.name}', owned by the "
            f"same user, '{user.name}'."
        )

    def _fail_if_stack_with_name_already_shared(
        self,
        stack: StackRequestModel,
        session: Session,
    ) -> None:
        """Raise an exception if a Stack with same name is already shared.

        Args:
            stack: The stack to check.
            session: The DB session to use.

        Raises:
            StackExistsError: If a stack with the given name is already shared
                by a user.
        """
        # Look for a stack with the same name and sharing status within the
        # workspace.
        shared_stack = session.exec(
            select(StackSchema)
            .where(StackSchema.name == stack.name)
            .where(StackSchema.workspace_id == stack.workspace)
            .where(StackSchema.is_shared == stack.is_shared)
        ).first()
        if shared_stack is None:
            return

        workspace = self._get_workspace_schema(
            workspace_name_or_id=stack.workspace, session=session
        )
        error_msg = (
            f"Unable to share stack with name '{stack.name}': Found an "
            f"existing shared stack with the same name in workspace "
            f"'{workspace.name}'"
        )
        if shared_stack.user_id:
            owner_of_shared = self._get_user_schema(
                shared_stack.user_id, session=session
            )
            error_msg += f" owned by '{owner_of_shared.name}'."
        else:
            error_msg += ", which is currently not owned by any user."
        raise StackExistsError(error_msg)

    # ----------------
    # Stack components
    # ----------------
    @track(AnalyticsEvent.REGISTERED_STACK_COMPONENT, v2=True)
    def create_stack_component(
        self,
        component: ComponentRequestModel,
    ) -> ComponentResponseModel:
        """Create a stack component.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.

        Raises:
            KeyError: if the stack component references a non-existent
                connector.
        """
        with Session(self.engine) as session:
            self._fail_if_component_with_name_type_exists_for_user(
                name=component.name,
                component_type=component.type,
                user_id=component.user,
                workspace_id=component.workspace,
                session=session,
            )

            if component.is_shared:
                self._fail_if_component_with_name_type_already_shared(
                    name=component.name,
                    component_type=component.type,
                    workspace_id=component.workspace,
                    session=session,
                )

            # Resolve the referenced service connector, if one was supplied.
            service_connector: Optional[ServiceConnectorSchema] = None
            if component.connector:
                service_connector = session.exec(
                    select(ServiceConnectorSchema).where(
                        ServiceConnectorSchema.id == component.connector
                    )
                ).first()
                if service_connector is None:
                    raise KeyError(
                        f"Service connector with ID {component.connector} not "
                        "found."
                    )

            # Configuration and labels are persisted as base64-encoded JSON.
            encoded_configuration = base64.b64encode(
                json.dumps(component.configuration).encode("utf-8")
            )
            encoded_labels = base64.b64encode(
                json.dumps(component.labels).encode("utf-8")
            )

            new_component = StackComponentSchema(
                name=component.name,
                workspace_id=component.workspace,
                user_id=component.user,
                is_shared=component.is_shared,
                type=component.type,
                flavor=component.flavor,
                configuration=encoded_configuration,
                labels=encoded_labels,
                connector=service_connector,
                connector_resource_id=component.connector_resource_id,
            )

            session.add(new_component)
            session.commit()
            session.refresh(new_component)

            return new_component.to_model()

    def get_stack_component(
        self, component_id: UUID
    ) -> ComponentResponseModel:
        """Get a stack component by ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """
        with Session(self.engine) as session:
            result = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.id == component_id
                )
            ).first()
            if result is None:
                raise KeyError(
                    f"Stack component with ID {component_id} not found."
                )
            return result.to_model()

    def list_stack_components(
        self, component_filter_model: ComponentFilterModel
    ) -> Page[ComponentResponseModel]:
        """List all stack components matching the given filter criteria.

        Args:
            component_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all stack components matching the filter criteria.
        """
        with Session(self.engine) as session:
            result: Page[ComponentResponseModel] = self.filter_and_paginate(
                session=session,
                query=select(StackComponentSchema),
                table=StackComponentSchema,
                filter_model=component_filter_model,
            )
            return result

    @track(AnalyticsEvent.UPDATED_STACK_COMPONENT)
    def update_stack_component(
        self, component_id: UUID, component_update: ComponentUpdateModel
    ) -> ComponentResponseModel:
        """Update an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
            IllegalOperationError: if the stack component is a default stack
                component.
        """
        with Session(self.engine) as session:
            existing_component = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.id == component_id
                )
            ).first()
            if existing_component is None:
                # Fixed: the f-string fragments previously concatenated into
                # "Found noexisting" (missing space).
                raise KeyError(
                    f"Unable to update component with id "
                    f"'{component_id}': Found no "
                    f"existing component with this id."
                )

            # The default orchestrator and artifact store are protected.
            if (
                existing_component.name == DEFAULT_STACK_COMPONENT_NAME
                and existing_component.type
                in [
                    StackComponentType.ORCHESTRATOR,
                    StackComponentType.ARTIFACT_STORE,
                ]
            ):
                raise IllegalOperationError(
                    f"The default {existing_component.type} cannot be modified."
                )

            # In case of a renaming update, make sure no component of the same
            # type already exists with that name for the same user.
            if (
                component_update.name
                and existing_component.name != component_update.name
                and existing_component.user_id is not None
            ):
                self._fail_if_component_with_name_type_exists_for_user(
                    name=component_update.name,
                    component_type=existing_component.type,
                    workspace_id=existing_component.workspace_id,
                    user_id=existing_component.user_id,
                    session=session,
                )

            # If the update turns a private component into a shared one, check
            # that no component with the same name and type is already shared
            # within the workspace. (The redundant re-check of
            # `component_update.is_shared` in the nested condition was
            # removed.)
            if component_update.is_shared and not existing_component.is_shared:
                self._fail_if_component_with_name_type_already_shared(
                    name=component_update.name or existing_component.name,
                    component_type=existing_component.type,
                    workspace_id=existing_component.workspace_id,
                    session=session,
                )

            existing_component.update(component_update=component_update)

            # Resolve and attach the referenced service connector, if any.
            # (After the None check the connector is guaranteed, so the
            # previous separate `if service_connector:` assignment was
            # folded in.)
            if component_update.connector:
                service_connector = session.exec(
                    select(ServiceConnectorSchema).where(
                        ServiceConnectorSchema.id == component_update.connector
                    )
                ).first()
                if service_connector is None:
                    raise KeyError(
                        "Service connector with ID "
                        f"{component_update.connector} not found."
                    )
                existing_component.connector = service_connector

            session.add(existing_component)
            session.commit()

            return existing_component.to_model()

    @track(AnalyticsEvent.DELETED_STACK_COMPONENT)
    def delete_stack_component(self, component_id: UUID) -> None:
        """Delete a stack component.

        Args:
            component_id: The id of the stack component to delete.

        Raises:
            KeyError: if the stack component doesn't exist.
            IllegalOperationError: if the stack component is part of one or
                more stacks, or if it's a default stack component.
        """
        with Session(self.engine) as session:
            # `.first()` + explicit check replaces the old `.one()` inside a
            # try/except: `.one()` never returns None, so the previous
            # `is None` branch was dead code and the KeyError raised from
            # NoResultFound carried no message.
            stack_component = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.id == component_id
                )
            ).first()

            if stack_component is None:
                raise KeyError(f"Stack with ID {component_id} not found.")

            # The default orchestrator and artifact store must never be
            # removed; the default stack depends on them.
            if (
                stack_component.name == DEFAULT_STACK_COMPONENT_NAME
                and stack_component.type
                in [
                    StackComponentType.ORCHESTRATOR,
                    StackComponentType.ARTIFACT_STORE,
                ]
            ):
                raise IllegalOperationError(
                    f"The default {stack_component.type} cannot be deleted."
                )

            # A component referenced by stacks cannot be removed without
            # breaking those stacks.
            if len(stack_component.stacks) > 0:
                raise IllegalOperationError(
                    f"Stack Component `{stack_component.name}` of type "
                    f"`{stack_component.type}` cannot be "
                    f"deleted as it is part of "
                    f"{len(stack_component.stacks)} stacks. "
                    f"Before deleting this stack "
                    f"component, make sure to remove it "
                    f"from all stacks."
                )

            session.delete(stack_component)
            session.commit()

    @staticmethod
    def _fail_if_component_with_name_type_exists_for_user(
        name: str,
        component_type: StackComponentType,
        workspace_id: UUID,
        user_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a Component with same name/type exists.

        Args:
            name: The name of the component
            component_type: The type of the component
            workspace_id: The ID of the workspace
            user_id: The ID of the user
            session: The Session

        Raises:
            StackComponentExistsError: If a component with the given name and
                                       type is already owned by the user
        """
        assert user_id
        # Check if component with the same domain key (name, type, workspace,
        # owner) already exists
        existing_domain_component = session.exec(
            select(StackComponentSchema)
            .where(StackComponentSchema.name == name)
            .where(StackComponentSchema.workspace_id == workspace_id)
            .where(StackComponentSchema.user_id == user_id)
            .where(StackComponentSchema.type == component_type)
        ).first()
        if existing_domain_component is not None:
            # Theoretically the user schema is optional, in this case there is
            #  no way that it will be None
            assert existing_domain_component.user
            # Fixed the double space before "workspace" that the old
            # f-string concatenation produced.
            raise StackComponentExistsError(
                f"Unable to register '{component_type.value}' component "
                f"with name '{name}': Found an existing "
                f"component with the same name and type in the same "
                f"workspace, '{existing_domain_component.workspace.name}', "
                f"owned by the same user, "
                f"'{existing_domain_component.user.name}'."
            )

    @staticmethod
    def _fail_if_component_with_name_type_already_shared(
        name: str,
        component_type: StackComponentType,
        workspace_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a Component with same name/type already shared.

        Args:
            name: The name of the component
            component_type: The type of the component
            workspace_id: The ID of the workspace
            session: The Session

        Raises:
            StackComponentExistsError: If a component with the given name and
                type is already shared by a user
        """
        # Check if component with the same name, type is already shared
        # within the workspace. The local variable keeps the SQLModel
        # comparison explicit instead of a bare `== True` literal.
        is_shared = True
        existing_shared_component = session.exec(
            select(StackComponentSchema)
            .where(StackComponentSchema.name == name)
            .where(StackComponentSchema.workspace_id == workspace_id)
            .where(StackComponentSchema.type == component_type)
            .where(StackComponentSchema.is_shared == is_shared)
        ).first()
        if existing_shared_component is not None:
            # "Unable to share" — fixed grammar in the user-facing message.
            raise StackComponentExistsError(
                f"Unable to share component of type '{component_type.value}' "
                f"with name '{name}': Found an existing shared "
                f"component with the same name and type in workspace "
                f"'{workspace_id}'."
            )

    # -----------------------
    # Stack component flavors
    # -----------------------

    @track(AnalyticsEvent.CREATED_FLAVOR, v1=False, v2=True)
    def create_flavor(self, flavor: FlavorRequestModel) -> FlavorResponseModel:
        """Creates a new stack component flavor.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.

        Raises:
            EntityExistsError: If a flavor with the same name and type
                is already owned by this user in this workspace.
            ValueError: In case the config_schema string exceeds the max length.
        """
        with Session(self.engine) as session:
            # Check if flavor with the same domain key (name, type, workspace,
            # owner) already exists
            existing_flavor = session.exec(
                select(FlavorSchema)
                .where(FlavorSchema.name == flavor.name)
                .where(FlavorSchema.type == flavor.type)
                .where(FlavorSchema.workspace_id == flavor.workspace)
                .where(FlavorSchema.user_id == flavor.user)
            ).first()

            if existing_flavor is not None:
                raise EntityExistsError(
                    f"Unable to register '{flavor.type.value}' flavor "
                    f"with name '{flavor.name}': Found an existing "
                    f"flavor with the same name and type in the same "
                    f"'{flavor.workspace}' workspace owned by the same "
                    f"'{flavor.user}' user."
                )

            # The config schema is persisted as JSON text and has to fit the
            # underlying column; reject it before attempting the insert.
            config_schema = json.dumps(flavor.config_schema)

            if len(config_schema) > TEXT_FIELD_MAX_LENGTH:
                # The old message concatenated to "schemaexceeds" — fixed.
                raise ValueError(
                    "Json representation of configuration schema "
                    "exceeds max length."
                )

            new_flavor = FlavorSchema(
                name=flavor.name,
                type=flavor.type,
                source=flavor.source,
                config_schema=config_schema,
                integration=flavor.integration,
                connector_type=flavor.connector_type,
                connector_resource_type=flavor.connector_resource_type,
                connector_resource_id_attr=flavor.connector_resource_id_attr,
                workspace_id=flavor.workspace,
                user_id=flavor.user,
                logo_url=flavor.logo_url,
                docs_url=flavor.docs_url,
                sdk_docs_url=flavor.sdk_docs_url,
                is_custom=flavor.is_custom,
            )
            session.add(new_flavor)
            session.commit()

            return new_flavor.to_model()

    def update_flavor(
        self, flavor_id: UUID, flavor_update: FlavorUpdateModel
    ) -> FlavorResponseModel:
        """Updates an existing flavor.

        Args:
            flavor_id: The id of the flavor to update.
            flavor_update: The update to be applied to the flavor.

        Returns:
            The updated flavor.

        Raises:
            KeyError: If no flavor with the given id exists.
        """
        with Session(self.engine) as session:
            existing_flavor = session.exec(
                select(FlavorSchema).where(FlavorSchema.id == flavor_id)
            ).first()

            if not existing_flavor:
                raise KeyError(f"Flavor with ID {flavor_id} not found.")

            existing_flavor.update(flavor_update=flavor_update)
            session.add(existing_flavor)
            session.commit()

            # Refresh the schema so the returned model reflects any values
            # computed by the database during the commit.
            session.refresh(existing_flavor)
            return existing_flavor.to_model()

    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Get a flavor by ID.

        Args:
            flavor_id: The ID of the flavor to fetch.

        Returns:
            The stack component flavor.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """
        with Session(self.engine) as session:
            flavor = session.exec(
                select(FlavorSchema).where(FlavorSchema.id == flavor_id)
            ).first()
            if flavor is None:
                raise KeyError(f"Flavor with ID {flavor_id} not found.")
            return flavor.to_model()

    def list_flavors(
        self, flavor_filter_model: FlavorFilterModel
    ) -> Page[FlavorResponseModel]:
        """List all stack component flavors matching the given filter criteria.

        Args:
            flavor_filter_model: All filter parameters including pagination
                params

        Returns:
            List of all the stack component flavors matching the given criteria.
        """
        # Filtering and pagination are applied by the shared helper.
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(FlavorSchema),
                table=FlavorSchema,
                filter_model=flavor_filter_model,
            )

    @track(AnalyticsEvent.DELETED_FLAVOR)
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Delete a flavor.

        Args:
            flavor_id: The id of the flavor to delete.

        Raises:
            KeyError: if the flavor doesn't exist.
            IllegalOperationError: if the flavor is used by a stack component.
        """
        with Session(self.engine) as session:
            # `.first()` + explicit check replaces the old `.one()` inside a
            # try/except: `.one()` never returns None, so the previous
            # `is None` branch was dead code and the KeyError raised from
            # NoResultFound carried no message.
            flavor_in_db = session.exec(
                select(FlavorSchema).where(FlavorSchema.id == flavor_id)
            ).first()

            if flavor_in_db is None:
                raise KeyError(f"Flavor with ID {flavor_id} not found.")

            # A flavor that still backs registered components cannot be
            # removed without breaking those components.
            components_of_flavor = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.flavor == flavor_in_db.name
                )
            ).all()
            if len(components_of_flavor) > 0:
                # The entity being deleted is a flavor, not a component —
                # the old message said "Stack Component"; also fixed the
                # unclosed backtick.
                raise IllegalOperationError(
                    f"Flavor `{flavor_in_db.name}` of type "
                    f"`{flavor_in_db.type}` cannot be "
                    f"deleted as it is used by "
                    f"{len(components_of_flavor)} "
                    f"components. Before deleting this "
                    f"flavor, make sure to delete all "
                    f"associated components."
                )

            session.delete(flavor_in_db)
            session.commit()

    # -----
    # Users
    # -----

    @track(AnalyticsEvent.CREATED_USER)
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Creates a new user.

        Args:
            user: User to be created.

        Returns:
            The newly created user.

        Raises:
            EntityExistsError: If a user with the given name already exists.
        """
        with Session(self.engine) as session:
            # Enforce user name uniqueness before inserting.
            name_clash = session.exec(
                select(UserSchema).where(UserSchema.name == user.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create user with name '{user.name}': "
                    f"Found existing user with this name."
                )

            user_schema = UserSchema.from_request(user)
            session.add(user_schema)
            session.commit()
            return user_schema.to_model()

    def get_user(
        self,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        include_private: bool = False,
    ) -> UserResponseModel:
        """Gets a specific user, when no id is specified the active user is returned.

        Raises a KeyError in case a user with that id does not exist.

        Args:
            user_name_or_id: The name or ID of the user to get.
            include_private: Whether to include private user information

        Returns:
            The requested user, if it was found.
        """
        # Fall back to the default user when no identifier is supplied.
        identifier = user_name_or_id or self._default_user_name

        with Session(self.engine) as session:
            user_schema = self._get_user_schema(identifier, session=session)
            return user_schema.to_model(include_private=include_private)

    def get_auth_user(
        self, user_name_or_id: Union[str, UUID]
    ) -> UserAuthModel:
        """Gets the auth model to a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.
        """
        with Session(self.engine) as session:
            schema = self._get_user_schema(user_name_or_id, session=session)
            # Copy over only the fields the auth model carries.
            return UserAuthModel(
                id=schema.id,
                name=schema.name,
                full_name=schema.full_name,
                email_opted_in=schema.email_opted_in,
                active=schema.active,
                created=schema.created,
                updated=schema.updated,
                password=schema.password,
                activation_token=schema.activation_token,
            )

    def list_users(
        self, user_filter_model: UserFilterModel
    ) -> Page[UserResponseModel]:
        """List all users.

        Args:
            user_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all users.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            users_page: Page[UserResponseModel] = self.filter_and_paginate(
                session=session,
                query=select(UserSchema),
                table=UserSchema,
                filter_model=user_filter_model,
            )
            return users_page

    @track(AnalyticsEvent.UPDATED_USER)
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Updates an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The updated user.

        Raises:
            IllegalOperationError: If the request tries to update the username
                for the default user account.
        """
        with Session(self.engine) as session:
            user_schema = self._get_user_schema(user_id, session=session)

            # The default user account must keep its name; reject an update
            # that explicitly sets a different one.
            renames_default_user = (
                user_schema.name == self._default_user_name
                and "name" in user_update.__fields_set__
                and user_update.name != user_schema.name
            )
            if renames_default_user:
                raise IllegalOperationError(
                    "The username of the default user account cannot be "
                    "changed."
                )

            user_schema.update(user_update=user_update)
            session.add(user_schema)
            session.commit()

            # Refresh so the returned model reflects DB-computed values.
            session.refresh(user_schema)
            return user_schema.to_model()

    @track(AnalyticsEvent.DELETED_USER)
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Deletes a user.

        Args:
            user_name_or_id: The name or the ID of the user to delete.

        Raises:
            IllegalOperationError: If the user is the default user account.
        """
        with Session(self.engine) as session:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )

            # The default user account is required and can never be removed.
            if user_schema.name == self._default_user_name:
                raise IllegalOperationError(
                    "The default user account cannot be deleted."
                )

            # Give registered store event handlers a chance to react before
            # the row disappears.
            self._trigger_event(
                StoreEvent.USER_DELETED, user_id=user_schema.id
            )

            session.delete(user_schema)
            session.commit()

    # -----
    # Teams
    # -----

    @track(AnalyticsEvent.CREATED_TEAM)
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Creates a new team.

        Args:
            team: The team model to create.

        Returns:
            The newly created team.

        Raises:
            EntityExistsError: If a team with the given name already exists.
        """
        with Session(self.engine) as session:
            # Enforce team name uniqueness before inserting.
            name_clash = session.exec(
                select(TeamSchema).where(TeamSchema.name == team.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create team with name '{team.name}': "
                    f"Found existing team with this name."
                )

            members = []
            if team.users:
                # Resolve all referenced user schemas in a single query.
                members = session.exec(
                    select(UserSchema).where(
                        or_(*[UserSchema.id == uid for uid in team.users])
                    )
                ).all()

            team_schema = TeamSchema(name=team.name, users=members)
            session.add(team_schema)
            session.commit()
            return team_schema.to_model()

    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Gets a specific team.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.
        """
        with Session(self.engine) as session:
            team_schema = self._get_team_schema(
                team_name_or_id, session=session
            )
            return team_schema.to_model()

    def list_teams(
        self, team_filter_model: TeamFilterModel
    ) -> Page[TeamResponseModel]:
        """List all teams matching the given filter criteria.

        Args:
            team_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all teams matching the filter criteria.
        """
        # Filtering and pagination are applied by the shared helper.
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(TeamSchema),
                table=TeamSchema,
                filter_model=team_filter_model,
            )

    @track(AnalyticsEvent.UPDATED_TEAM)
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Update an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The updated team.

        Raises:
            KeyError: if the team does not exist.
        """
        with Session(self.engine) as session:
            existing_team = session.exec(
                select(TeamSchema).where(TeamSchema.id == team_id)
            ).first()

            if existing_team is None:
                # The old message concatenated to "Found noexisting" — fixed.
                raise KeyError(
                    f"Unable to update team with id "
                    f"'{team_id}': Found no "
                    f"existing teams with this id."
                )

            # Update the team
            existing_team.update(team_update=team_update)

            # NOTE(review): the membership list is reset unconditionally,
            # even when the update does not set `users` — confirm this is
            # the intended semantics before relying on partial updates.
            existing_team.users = []
            if "users" in team_update.__fields_set__ and team_update.users:
                for user in team_update.users:
                    existing_team.users.append(
                        self._get_user_schema(
                            user_name_or_id=user, session=session
                        )
                    )

            session.add(existing_team)
            session.commit()

            # Refresh so the returned model reflects DB-computed values.
            session.refresh(existing_team)
            return existing_team.to_model()

    @track(AnalyticsEvent.DELETED_TEAM)
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Deletes a team.

        Args:
            team_name_or_id: Name or ID of the team to delete.
        """
        with Session(self.engine) as session:
            team_schema = self._get_team_schema(
                team_name_or_id, session=session
            )
            session.delete(team_schema)
            session.commit()

    # -----
    # Roles
    # -----

    @track(AnalyticsEvent.CREATED_ROLE)
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Creates a new role.

        Args:
            role: The role model to create.

        Returns:
            The newly created role.

        Raises:
            EntityExistsError: If a role with the given name already exists.
        """
        with Session(self.engine) as session:
            # Enforce role name uniqueness before inserting.
            name_clash = session.exec(
                select(RoleSchema).where(RoleSchema.name == role.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create role '{role.name}': Role already exists."
                )

            # Commit the role first; the permission rows added below
            # reference its ID.
            role_schema = RoleSchema.from_request(role)
            session.add(role_schema)
            session.commit()

            for permission in role.permissions:
                session.add(
                    RolePermissionSchema(
                        name=permission, role_id=role_schema.id
                    )
                )
            session.commit()

            return role_schema.to_model()

    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Gets a specific role.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.
        """
        with Session(self.engine) as session:
            role_schema = self._get_role_schema(
                role_name_or_id, session=session
            )
            return role_schema.to_model()

    def list_roles(
        self, role_filter_model: RoleFilterModel
    ) -> Page[RoleResponseModel]:
        """List all roles matching the given filter criteria.

        Args:
            role_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all roles matching the filter criteria.
        """
        # Filtering and pagination are applied by the shared helper.
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(RoleSchema),
                table=RoleSchema,
                filter_model=role_filter_model,
            )

    @track(AnalyticsEvent.UPDATED_ROLE)
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Update an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The updated role.

        Raises:
            KeyError: if the role does not exist.
            IllegalOperationError: if the role is a system role.
        """
        with Session(self.engine) as session:
            existing_role = session.exec(
                select(RoleSchema).where(RoleSchema.id == role_id)
            ).first()

            if existing_role is None:
                # The old message concatenated to "Found noexisting" — fixed.
                raise KeyError(
                    f"Unable to update role with id "
                    f"'{role_id}': Found no "
                    f"existing roles with this id."
                )

            if existing_role.name in [DEFAULT_ADMIN_ROLE, DEFAULT_GUEST_ROLE]:
                raise IllegalOperationError(
                    f"The built-in role '{existing_role.name}' cannot be "
                    f"updated."
                )

            # The relationship table for roles behaves different from the other
            #  ones. As such the required updates on the permissions have to be
            #  done manually.
            if "permissions" in role_update.__fields_set__:
                existing_permissions = {
                    p.name for p in existing_role.permissions
                }

                # Permissions present on exactly one side must be either
                # removed or added.
                diff = existing_permissions.symmetric_difference(
                    role_update.permissions
                )

                for permission in diff:
                    if permission not in role_update.permissions:
                        permission_to_delete = session.exec(
                            select(RolePermissionSchema)
                            .where(RolePermissionSchema.name == permission)
                            .where(
                                RolePermissionSchema.role_id
                                == existing_role.id
                            )
                        ).one_or_none()
                        # `one_or_none()` may return None (e.g. if the row
                        # was removed concurrently); `session.delete(None)`
                        # would raise, so guard the call.
                        if permission_to_delete is not None:
                            session.delete(permission_to_delete)

                    elif permission not in existing_permissions:
                        session.add(
                            RolePermissionSchema(
                                name=permission, role_id=existing_role.id
                            )
                        )

            # Update the role (the duplicate second commit was removed).
            existing_role.update(role_update=role_update)
            session.add(existing_role)
            session.commit()

            # Refresh so the returned model reflects DB-computed values.
            session.refresh(existing_role)
            return existing_role.to_model()

    @track(AnalyticsEvent.DELETED_ROLE)
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Deletes a role.

        Args:
            role_name_or_id: Name or ID of the role to delete.

        Raises:
            IllegalOperationError: If the role is still assigned to users or
                the role is one of the built-in roles.
        """
        with Session(self.engine) as session:
            role = self._get_role_schema(role_name_or_id, session=session)

            # Built-in roles are part of the store's baseline setup and must
            # never be removed.
            if role.name in [DEFAULT_ADMIN_ROLE, DEFAULT_GUEST_ROLE]:
                raise IllegalOperationError(
                    f"The built-in role '{role.name}' cannot be deleted."
                )

            # A role referenced by user or team assignments must stay.
            user_assignments = session.exec(
                select(UserRoleAssignmentSchema).where(
                    UserRoleAssignmentSchema.role_id == role.id
                )
            ).all()
            team_assignments = session.exec(
                select(TeamRoleAssignmentSchema).where(
                    TeamRoleAssignmentSchema.role_id == role.id
                )
            ).all()

            if user_assignments or team_assignments:
                raise IllegalOperationError(
                    f"Role `{role.name}` of type cannot be "
                    f"deleted as it is in use by multiple users and teams. "
                    f"Before deleting this role make sure to remove all "
                    f"instances where this role is used."
                )

            session.delete(role)
            session.commit()

    # ----------------
    # Role assignments
    # ----------------

    def list_user_role_assignments(
        self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
    ) -> Page[UserRoleAssignmentResponseModel]:
        """List all roles assignments matching the given filter criteria.

        Args:
            user_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """
        # Filtering and pagination are applied by the shared helper.
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(UserRoleAssignmentSchema),
                table=UserRoleAssignmentSchema,
                filter_model=user_role_assignment_filter_model,
            )

    def create_user_role_assignment(
        self, user_role_assignment: UserRoleAssignmentRequestModel
    ) -> UserRoleAssignmentResponseModel:
        """Assigns a role to a user or team, scoped to a specific workspace.

        Args:
            user_role_assignment: The role assignment to create.

        Returns:
            The created role assignment.

        Raises:
            EntityExistsError: if the role assignment already exists.
        """
        with Session(self.engine) as session:
            # Resolve the referenced entities; the helpers raise if any of
            # them does not exist.
            role = self._get_role_schema(
                user_role_assignment.role, session=session
            )
            workspace: Optional[WorkspaceSchema] = None
            if user_role_assignment.workspace:
                workspace = self._get_workspace_schema(
                    user_role_assignment.workspace, session=session
                )
            user = self._get_user_schema(
                user_role_assignment.user, session=session
            )

            # Reject a duplicate of the same (user, role[, workspace]) tuple.
            duplicate_query = select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.user_id == user.id,
                UserRoleAssignmentSchema.role_id == role.id,
            )
            if workspace is not None:
                duplicate_query = duplicate_query.where(
                    UserRoleAssignmentSchema.workspace_id == workspace.id
                )
            if session.exec(duplicate_query).first() is not None:
                raise EntityExistsError(
                    f"Unable to assign role '{role.name}' to user "
                    f"'{user.name}': Role already assigned in this workspace."
                )

            assignment = UserRoleAssignmentSchema(
                role_id=role.id,
                user_id=user.id,
                workspace_id=workspace.id if workspace else None,
                role=role,
                user=user,
                workspace=workspace,
            )
            session.add(assignment)
            session.commit()
            return assignment.to_model()

    def get_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> UserRoleAssignmentResponseModel:
        """Fetch a single user role assignment by its ID.

        Args:
            user_role_assignment_id: ID of the role assignment to get.

        Returns:
            The role assignment.

        Raises:
            KeyError: If the role assignment does not exist.
        """
        with Session(self.engine) as session:
            statement = select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.id == user_role_assignment_id
            )
            assignment = session.exec(statement).one_or_none()
            if assignment is None:
                raise KeyError(
                    f"Unable to get user role assignment with ID "
                    f"'{user_role_assignment_id}': No user role assignment "
                    f"with this ID found."
                )
            return assignment.to_model()

    def delete_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> None:
        """Delete a specific role assignment.

        Args:
            user_role_assignment_id: The ID of the specific role assignment.

        Raises:
            KeyError: If the role assignment does not exist.
        """
        with Session(self.engine) as session:
            statement = select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.id == user_role_assignment_id
            )
            assignment = session.exec(statement).one_or_none()
            if assignment is None:
                raise KeyError(
                    f"No user role assignment with id "
                    f"{user_role_assignment_id} exists."
                )
            session.delete(assignment)
            session.commit()

    # ---------------------
    # Team Role assignments
    # ---------------------

    def create_team_role_assignment(
        self, team_role_assignment: TeamRoleAssignmentRequestModel
    ) -> TeamRoleAssignmentResponseModel:
        """Creates a new team role assignment.

        Args:
            team_role_assignment: The role assignment model to create.

        Returns:
            The newly created role assignment.

        Raises:
            EntityExistsError: If the role assignment already exists.
        """
        with Session(self.engine) as session:
            role = self._get_role_schema(
                team_role_assignment.role, session=session
            )
            workspace: Optional[WorkspaceSchema] = None
            if team_role_assignment.workspace:
                workspace = self._get_workspace_schema(
                    team_role_assignment.workspace, session=session
                )
            team = self._get_team_schema(
                team_role_assignment.team, session=session
            )
            # Duplicate check. BUGFIX: this previously queried
            # `UserRoleAssignmentSchema` with `user_id == team.id`, which can
            # never match a team assignment, so duplicate team assignments
            # were silently allowed. Query the team assignment table instead.
            query = select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.team_id == team.id,
                TeamRoleAssignmentSchema.role_id == role.id,
            )
            if workspace is not None:
                query = query.where(
                    TeamRoleAssignmentSchema.workspace_id == workspace.id
                )
            existing_role_assignment = session.exec(query).first()
            if existing_role_assignment is not None:
                raise EntityExistsError(
                    f"Unable to assign role '{role.name}' to team "
                    f"'{team.name}': Role already assigned in this workspace."
                )
            # Persist the new assignment.
            role_assignment = TeamRoleAssignmentSchema(
                role_id=role.id,
                team_id=team.id,
                workspace_id=workspace.id if workspace else None,
                role=role,
                team=team,
                workspace=workspace,
            )
            session.add(role_assignment)
            session.commit()
            return role_assignment.to_model()

    def get_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> TeamRoleAssignmentResponseModel:
        """Fetch a single team role assignment by its ID.

        Args:
            team_role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.

        Raises:
            KeyError: If no role assignment with the given ID exists.
        """
        with Session(self.engine) as session:
            statement = select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.id == team_role_assignment_id
            )
            assignment = session.exec(statement).one_or_none()
            if assignment is None:
                raise KeyError(
                    f"Unable to get team role assignment with ID "
                    f"'{team_role_assignment_id}': No team role assignment "
                    f"with this ID found."
                )
            return assignment.to_model()

    def delete_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> None:
        """Delete a specific role assignment.

        Args:
            team_role_assignment_id: The ID of the specific role assignment

        Raises:
            KeyError: If the role assignment does not exist.
        """
        with Session(self.engine) as session:
            statement = select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.id == team_role_assignment_id
            )
            assignment = session.exec(statement).one_or_none()
            if assignment is None:
                raise KeyError(
                    f"No team role assignment with id "
                    f"{team_role_assignment_id} exists."
                )
            session.delete(assignment)
            session.commit()

    def list_team_role_assignments(
        self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
    ) -> Page[TeamRoleAssignmentResponseModel]:
        """List all roles assignments matching the given filter criteria.

        Args:
            team_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(TeamRoleAssignmentSchema),
                table=TeamRoleAssignmentSchema,
                filter_model=team_role_assignment_filter_model,
            )

    # --------
    # Workspaces
    # --------

    @track(AnalyticsEvent.CREATED_WORKSPACE, v2=True)
    def create_workspace(
        self, workspace: WorkspaceRequestModel
    ) -> WorkspaceResponseModel:
        """Creates a new workspace.

        Args:
            workspace: The workspace to create.

        Returns:
            The newly created workspace.

        Raises:
            EntityExistsError: If a workspace with the given name already exists.
        """
        with Session(self.engine) as session:
            # Workspace names are unique; reject duplicates up front.
            name_clash = session.exec(
                select(WorkspaceSchema).where(
                    WorkspaceSchema.name == workspace.name
                )
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create workspace {workspace.name}: "
                    "A workspace with this name already exists."
                )

            # Persist the new workspace and refresh to pick up DB-side
            # defaults before converting to the response model.
            workspace_schema = WorkspaceSchema.from_request(workspace)
            session.add(workspace_schema)
            session.commit()
            session.refresh(workspace_schema)
            return workspace_schema.to_model()

    def get_workspace(
        self, workspace_name_or_id: Union[str, UUID]
    ) -> WorkspaceResponseModel:
        """Get an existing workspace by name or ID.

        Args:
            workspace_name_or_id: Name or ID of the workspace to get.

        Returns:
            The requested workspace if one was found.
        """
        with Session(self.engine) as session:
            # Resolution by either name or ID is handled by the helper.
            workspace_schema = self._get_workspace_schema(
                workspace_name_or_id, session=session
            )
        return workspace_schema.to_model()

    def list_workspaces(
        self, workspace_filter_model: WorkspaceFilterModel
    ) -> Page[WorkspaceResponseModel]:
        """List all workspace matching the given filter criteria.

        Args:
            workspace_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all workspace matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(WorkspaceSchema),
                table=WorkspaceSchema,
                filter_model=workspace_filter_model,
            )

    @track(AnalyticsEvent.UPDATED_WORKSPACE)
    def update_workspace(
        self, workspace_id: UUID, workspace_update: WorkspaceUpdateModel
    ) -> WorkspaceResponseModel:
        """Update an existing workspace.

        Args:
            workspace_id: The ID of the workspace to be updated.
            workspace_update: The update to be applied to the workspace.

        Returns:
            The updated workspace.

        Raises:
            IllegalOperationError: if the workspace is the default workspace.
            KeyError: if the workspace does not exist.
        """
        with Session(self.engine) as session:
            existing_workspace = session.exec(
                select(WorkspaceSchema).where(
                    WorkspaceSchema.id == workspace_id
                )
            ).first()
            if existing_workspace is None:
                # BUGFIX: the two message fragments previously concatenated
                # without a space ("Found noexisting workspaces").
                raise KeyError(
                    f"Unable to update workspace with id "
                    f"'{workspace_id}': Found no "
                    f"existing workspaces with this id."
                )
            # Renaming the default workspace is forbidden; only reject when
            # the update explicitly sets a *different* name.
            if (
                existing_workspace.name == self._default_workspace_name
                and "name" in workspace_update.__fields_set__
                and workspace_update.name != existing_workspace.name
            ):
                raise IllegalOperationError(
                    "The name of the default workspace cannot be changed."
                )

            # Update the workspace
            existing_workspace.update(workspace_update=workspace_update)
            session.add(existing_workspace)
            session.commit()

            # Refresh the schema so DB-side changes are reflected in the
            # returned model.
            session.refresh(existing_workspace)
            return existing_workspace.to_model()

    @track(AnalyticsEvent.DELETED_WORKSPACE)
    def delete_workspace(self, workspace_name_or_id: Union[str, UUID]) -> None:
        """Deletes a workspace.

        Args:
            workspace_name_or_id: Name or ID of the workspace to delete.

        Raises:
            IllegalOperationError: If the workspace is the default workspace.
        """
        with Session(self.engine) as session:
            workspace_schema = self._get_workspace_schema(
                workspace_name_or_id, session=session
            )
            # The default workspace is protected from deletion.
            if workspace_schema.name == self._default_workspace_name:
                raise IllegalOperationError(
                    "The default workspace cannot be deleted."
                )

            # Give registered event handlers a chance to react (e.g. clean up
            # resources scoped to this workspace) before the row is removed.
            self._trigger_event(
                StoreEvent.WORKSPACE_DELETED,
                workspace_id=workspace_schema.id,
            )

            session.delete(workspace_schema)
            session.commit()

    # ---------
    # Pipelines
    # ---------
    @track(AnalyticsEvent.CREATE_PIPELINE, v2=True)
    def create_pipeline(
        self,
        pipeline: PipelineRequestModel,
    ) -> PipelineResponseModel:
        """Creates a new pipeline in a workspace.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The newly created pipeline.

        Raises:
            EntityExistsError: If an identical pipeline already exists.
        """
        with Session(self.engine) as session:
            # A pipeline is identified by (name, version, workspace); reject
            # duplicates of that triple.
            duplicate = session.exec(
                select(PipelineSchema).where(
                    PipelineSchema.name == pipeline.name,
                    PipelineSchema.version == pipeline.version,
                    PipelineSchema.workspace_id == pipeline.workspace,
                )
            ).first()
            if duplicate is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline in workspace "
                    f"'{pipeline.workspace}': A pipeline with this name and "
                    f"version already exists."
                )

            # Persist and refresh before converting to the response model.
            pipeline_schema = PipelineSchema.from_request(pipeline)
            session.add(pipeline_schema)
            session.commit()
            session.refresh(pipeline_schema)
            return pipeline_schema.to_model()

    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Get a pipeline with a given ID.

        Args:
            pipeline_id: ID of the pipeline.

        Returns:
            The pipeline.

        Raises:
            KeyError: if the pipeline does not exist.
        """
        with Session(self.engine) as session:
            statement = select(PipelineSchema).where(
                PipelineSchema.id == pipeline_id
            )
            pipeline_schema = session.exec(statement).first()
            if pipeline_schema is None:
                raise KeyError(
                    f"Unable to get pipeline with ID '{pipeline_id}': "
                    "No pipeline with this ID found."
                )
            return pipeline_schema.to_model()

    def list_pipelines(
        self, pipeline_filter_model: PipelineFilterModel
    ) -> Page[PipelineResponseModel]:
        """List all pipelines matching the given filter criteria.

        Args:
            pipeline_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all pipelines matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(PipelineSchema),
                table=PipelineSchema,
                filter_model=pipeline_filter_model,
            )

    @track(AnalyticsEvent.UPDATE_PIPELINE)
    def update_pipeline(
        self,
        pipeline_id: UUID,
        pipeline_update: PipelineUpdateModel,
    ) -> PipelineResponseModel:
        """Updates a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The updated pipeline.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """
        with Session(self.engine) as session:
            pipeline_schema = session.exec(
                select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
            ).first()
            if pipeline_schema is None:
                raise KeyError(
                    f"Unable to update pipeline with ID {pipeline_id}: "
                    f"No pipeline with this ID found."
                )

            # Apply the update in place and persist it.
            pipeline_schema.update(pipeline_update)
            session.add(pipeline_schema)
            session.commit()
            return pipeline_schema.to_model()

    @track(AnalyticsEvent.DELETE_PIPELINE)
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Deletes a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to delete.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """
        with Session(self.engine) as session:
            pipeline_schema = session.exec(
                select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
            ).first()
            if pipeline_schema is None:
                raise KeyError(
                    f"Unable to delete pipeline with ID {pipeline_id}: "
                    f"No pipeline with this ID found."
                )
            session.delete(pipeline_schema)
            session.commit()

    # ---------
    # Builds
    # ---------

    def create_build(
        self,
        build: PipelineBuildRequestModel,
    ) -> PipelineBuildResponseModel:
        """Creates a new build in a workspace.

        Args:
            build: The build to create.

        Returns:
            The newly created build.
        """
        with Session(self.engine) as session:
            # No uniqueness constraint is enforced for builds; just persist.
            build_schema = PipelineBuildSchema.from_request(build)
            session.add(build_schema)
            session.commit()
            session.refresh(build_schema)
            return build_schema.to_model()

    def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
        """Get a build with a given ID.

        Args:
            build_id: ID of the build.

        Returns:
            The build.

        Raises:
            KeyError: If the build does not exist.
        """
        with Session(self.engine) as session:
            statement = select(PipelineBuildSchema).where(
                PipelineBuildSchema.id == build_id
            )
            build_schema = session.exec(statement).first()
            if build_schema is None:
                raise KeyError(
                    f"Unable to get build with ID '{build_id}': "
                    "No build with this ID found."
                )
            return build_schema.to_model()

    def list_builds(
        self, build_filter_model: PipelineBuildFilterModel
    ) -> Page[PipelineBuildResponseModel]:
        """List all builds matching the given filter criteria.

        Args:
            build_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all builds matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(PipelineBuildSchema),
                table=PipelineBuildSchema,
                filter_model=build_filter_model,
            )

    def delete_build(self, build_id: UUID) -> None:
        """Deletes a build.

        Args:
            build_id: The ID of the build to delete.

        Raises:
            KeyError: if the build doesn't exist.
        """
        with Session(self.engine) as session:
            statement = select(PipelineBuildSchema).where(
                PipelineBuildSchema.id == build_id
            )
            build_schema = session.exec(statement).first()
            if build_schema is None:
                raise KeyError(
                    f"Unable to delete build with ID {build_id}: "
                    f"No build with this ID found."
                )
            session.delete(build_schema)
            session.commit()

    # ----------------------
    # Pipeline Deployments
    # ----------------------

    def create_deployment(
        self,
        deployment: PipelineDeploymentRequestModel,
    ) -> PipelineDeploymentResponseModel:
        """Creates a new deployment in a workspace.

        Args:
            deployment: The deployment to create.

        Returns:
            The newly created deployment.
        """
        with Session(self.engine) as session:
            # Deduplicate the code reference so identical references are
            # shared between deployments.
            code_reference_id = self._create_or_reuse_code_reference(
                session=session,
                workspace_id=deployment.workspace,
                code_reference=deployment.code_reference,
            )

            deployment_schema = PipelineDeploymentSchema.from_request(
                deployment, code_reference_id=code_reference_id
            )
            session.add(deployment_schema)
            session.commit()
            session.refresh(deployment_schema)
            return deployment_schema.to_model()

    def get_deployment(
        self, deployment_id: UUID
    ) -> PipelineDeploymentResponseModel:
        """Get a deployment with a given ID.

        Args:
            deployment_id: ID of the deployment.

        Returns:
            The deployment.

        Raises:
            KeyError: If the deployment does not exist.
        """
        with Session(self.engine) as session:
            statement = select(PipelineDeploymentSchema).where(
                PipelineDeploymentSchema.id == deployment_id
            )
            deployment_schema = session.exec(statement).first()
            if deployment_schema is None:
                raise KeyError(
                    f"Unable to get deployment with ID '{deployment_id}': "
                    "No deployment with this ID found."
                )
            return deployment_schema.to_model()

    def list_deployments(
        self, deployment_filter_model: PipelineDeploymentFilterModel
    ) -> Page[PipelineDeploymentResponseModel]:
        """List all deployments matching the given filter criteria.

        Args:
            deployment_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all deployments matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(PipelineDeploymentSchema),
                table=PipelineDeploymentSchema,
                filter_model=deployment_filter_model,
            )

    def delete_deployment(self, deployment_id: UUID) -> None:
        """Deletes a deployment.

        Args:
            deployment_id: The ID of the deployment to delete.

        Raises:
            KeyError: If the deployment doesn't exist.
        """
        with Session(self.engine) as session:
            # Check if a deployment with the given ID exists.
            statement = select(PipelineDeploymentSchema).where(
                PipelineDeploymentSchema.id == deployment_id
            )
            deployment_schema = session.exec(statement).first()
            if deployment_schema is None:
                raise KeyError(
                    f"Unable to delete deployment with ID {deployment_id}: "
                    f"No deployment with this ID found."
                )
            session.delete(deployment_schema)
            session.commit()

    # ---------
    # Schedules
    # ---------

    def create_schedule(
        self, schedule: ScheduleRequestModel
    ) -> ScheduleResponseModel:
        """Creates a new schedule.

        Args:
            schedule: The schedule to create.

        Returns:
            The newly created schedule.
        """
        with Session(self.engine) as session:
            # No uniqueness checks are performed for schedules.
            schedule_schema = ScheduleSchema.from_create_model(model=schedule)
            session.add(schedule_schema)
            session.commit()
            return schedule_schema.to_model()

    def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
        """Get a schedule with a given ID.

        Args:
            schedule_id: ID of the schedule.

        Returns:
            The schedule.

        Raises:
            KeyError: if the schedule does not exist.
        """
        with Session(self.engine) as session:
            statement = select(ScheduleSchema).where(
                ScheduleSchema.id == schedule_id
            )
            schedule_schema = session.exec(statement).first()
            if schedule_schema is None:
                raise KeyError(
                    f"Unable to get schedule with ID '{schedule_id}': "
                    "No schedule with this ID found."
                )
            return schedule_schema.to_model()

    def list_schedules(
        self, schedule_filter_model: ScheduleFilterModel
    ) -> Page[ScheduleResponseModel]:
        """List all schedules in the workspace.

        Args:
            schedule_filter_model: All filter parameters including pagination
                params

        Returns:
            A list of schedules.
        """
        with Session(self.engine) as session:
            # Filtering and pagination are applied by the shared helper.
            return self.filter_and_paginate(
                session=session,
                query=select(ScheduleSchema),
                table=ScheduleSchema,
                filter_model=schedule_filter_model,
            )

    def update_schedule(
        self,
        schedule_id: UUID,
        schedule_update: ScheduleUpdateModel,
    ) -> ScheduleResponseModel:
        """Updates a schedule.

        Args:
            schedule_id: The ID of the schedule to be updated.
            schedule_update: The update to be applied.

        Returns:
            The updated schedule.

        Raises:
            KeyError: if the schedule doesn't exist.
        """
        with Session(self.engine) as session:
            schedule_schema = session.exec(
                select(ScheduleSchema).where(ScheduleSchema.id == schedule_id)
            ).first()
            if schedule_schema is None:
                raise KeyError(
                    f"Unable to update schedule with ID {schedule_id}: "
                    f"No schedule with this ID found."
                )

            # Apply the update; `from_update_model` returns the updated
            # schema, which is then persisted.
            schedule_schema = schedule_schema.from_update_model(
                schedule_update
            )
            session.add(schedule_schema)
            session.commit()
            return schedule_schema.to_model()

    def delete_schedule(self, schedule_id: UUID) -> None:
        """Deletes a schedule.

        Args:
            schedule_id: The ID of the schedule to delete.

        Raises:
            KeyError: if the schedule doesn't exist.
        """
        with Session(self.engine) as session:
            statement = select(ScheduleSchema).where(
                ScheduleSchema.id == schedule_id
            )
            schedule_schema = session.exec(statement).first()
            if schedule_schema is None:
                raise KeyError(
                    f"Unable to delete schedule with ID {schedule_id}: "
                    f"No schedule with this ID found."
                )
            session.delete(schedule_schema)
            session.commit()

    # --------------
    # Pipeline runs
    # --------------

    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Creates a pipeline run.

        The run name and the (client-supplied) run ID must both be unique;
        the name is checked first, so a simultaneous name and ID clash
        reports the name conflict. Missing stack or pipeline references are
        tolerated: a warning is logged and the run is created without the
        link / as an unlisted run.

        Args:
            pipeline_run: The pipeline run to create.

        Returns:
            The created pipeline run.

        Raises:
            EntityExistsError: If an identical pipeline run already exists.
        """
        with Session(self.engine) as session:
            # Check if pipeline run with same name already exists.
            existing_domain_run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.name == pipeline_run.name
                )
            ).first()
            if existing_domain_run is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline run: A pipeline run with name "
                    f"'{pipeline_run.name}' already exists."
                )

            # Check if pipeline run with same ID already exists.
            existing_id_run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.id == pipeline_run.id
                )
            ).first()
            if existing_id_run is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline run: A pipeline run with ID "
                    f"'{pipeline_run.id}' already exists."
                )

            # Query stack to ensure it exists in the DB
            # NOTE: the lookup result is only used to decide whether to warn;
            # the run row itself is built from `pipeline_run` below.
            stack_id = None
            if pipeline_run.stack is not None:
                stack_id = session.exec(
                    select(StackSchema.id).where(
                        StackSchema.id == pipeline_run.stack
                    )
                ).first()
                if stack_id is None:
                    logger.warning(
                        f"No stack found for this run. "
                        f"Creating pipeline run '{pipeline_run.name}' without "
                        "linked stack."
                    )

            # Query pipeline to ensure it exists in the DB
            # NOTE: same as above — a missing pipeline only triggers a
            # warning, not a failure.
            pipeline_id = None
            if pipeline_run.pipeline is not None:
                pipeline_id = session.exec(
                    select(PipelineSchema.id).where(
                        PipelineSchema.id == pipeline_run.pipeline
                    )
                ).first()
                if pipeline_id is None:
                    logger.warning(
                        f"No pipeline found. Creating pipeline run "
                        f"'{pipeline_run.name}' as unlisted run."
                    )

            # Create the pipeline run
            new_run = PipelineRunSchema.from_request(pipeline_run)
            session.add(new_run)
            session.commit()

            # Convert via the helper so step runs are hydrated into the model.
            return self._run_schema_to_model(new_run)

    def _run_schema_to_model(
        self, run: PipelineRunSchema
    ) -> PipelineRunResponseModel:
        """Converts a pipeline run schema to a pipeline run model incl. steps.

        Args:
            run: The pipeline run schema to convert.

        Returns:
            The converted pipeline run model with steps hydrated into it.
        """
        # Convert each step run and key it by step name before attaching the
        # whole mapping to the run model.
        step_models = {}
        for step_run in run.step_runs:
            step_models[step_run.name] = self._run_step_schema_to_model(
                step_run
            )
        return run.to_model(steps=step_models)

    def get_run(
        self, run_name_or_id: Union[str, UUID]
    ) -> PipelineRunResponseModel:
        """Gets a pipeline run.

        Args:
            run_name_or_id: The name or ID of the pipeline run to get.

        Returns:
            The pipeline run.
        """
        with Session(self.engine) as session:
            run_schema = self._get_run_schema(run_name_or_id, session=session)
            return self._run_schema_to_model(run_schema)

    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> Tuple[PipelineRunResponseModel, bool]:
        """Gets or creates a pipeline run.

        If a run with the same ID or name already exists, it is returned.
        Otherwise, a new run is created.

        Args:
            pipeline_run: The pipeline run to get or create.

        Returns:
            The pipeline run, and a boolean indicating whether the run was
            created or not.
        """
        # Attempt the create first: doing so before any lookup narrows the
        # window for races between concurrent callers.
        try:
            return self.create_run(pipeline_run), True
        except (EntityExistsError, IntegrityError):
            # The run already exists — either our own duplicate check raised
            # `EntityExistsError` or the DB constraint raised
            # `IntegrityError`. Fetch the existing run, trying the ID first
            # and falling back to the name.
            try:
                return self.get_run(pipeline_run.id), False
            except KeyError:
                return self.get_run(pipeline_run.name), False

    def list_runs(
        self, runs_filter_model: PipelineRunFilterModel
    ) -> Page[PipelineRunResponseModel]:
        """List all pipeline runs matching the given filter criteria.

        Args:
            runs_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of pipeline runs matching the filter criteria.
        """
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(PipelineRunSchema),
                table=PipelineRunSchema,
                filter_model=runs_filter_model,
                custom_schema_to_model_conversion=self._run_schema_to_model,
            )

    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Updates a pipeline run.

        Args:
            run_id: The ID of the pipeline run to update.
            run_update: The update to be applied to the pipeline run.

        Returns:
            The updated pipeline run.

        Raises:
            KeyError: if the pipeline run doesn't exist.
        """
        with Session(self.engine) as session:
            # The run must exist before it can be updated.
            run_schema = session.exec(
                select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
            ).first()
            if run_schema is None:
                raise KeyError(
                    f"Unable to update pipeline run with ID {run_id}: "
                    f"No pipeline run with this ID found."
                )

            # Apply the update and persist it.
            run_schema.update(run_update=run_update)
            session.add(run_schema)
            session.commit()

            # Re-read DB-side state before converting to a model.
            session.refresh(run_schema)
            return self._run_schema_to_model(run_schema)

    def delete_run(self, run_id: UUID) -> None:
        """Deletes a pipeline run.

        Args:
            run_id: The ID of the pipeline run to delete.

        Raises:
            KeyError: if the pipeline run doesn't exist.
        """
        with Session(self.engine) as session:
            # The run must exist before it can be deleted.
            run_schema = session.exec(
                select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
            ).first()
            if run_schema is None:
                raise KeyError(
                    f"Unable to delete pipeline run with ID {run_id}: "
                    f"No pipeline run with this ID found."
                )

            session.delete(run_schema)
            session.commit()

    # ------------------
    # Pipeline run steps
    # ------------------

    def create_run_step(
        self, step_run: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Creates a step run.

        Validates that the parent pipeline run exists and that the step name
        is unique within that run, then persists the step together with its
        logs entry, parent-step links, and input/output artifact links in a
        single transaction.

        Args:
            step_run: The step run to create.

        Returns:
            The created step run.

        Raises:
            EntityExistsError: if the step run already exists.
            KeyError: if the pipeline run doesn't exist.
        """
        with Session(self.engine) as session:
            # Check if the pipeline run exists
            run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.id == step_run.pipeline_run_id
                )
            ).first()
            if run is None:
                raise KeyError(
                    f"Unable to create step '{step_run.name}': No pipeline run "
                    f"with ID '{step_run.pipeline_run_id}' found."
                )

            # Check if the step name already exists in the pipeline run
            existing_step_run = session.exec(
                select(StepRunSchema)
                .where(StepRunSchema.name == step_run.name)
                .where(
                    StepRunSchema.pipeline_run_id == step_run.pipeline_run_id
                )
            ).first()
            if existing_step_run is not None:
                raise EntityExistsError(
                    f"Unable to create step '{step_run.name}': A step with this "
                    f"name already exists in the pipeline run with ID "
                    f"'{step_run.pipeline_run_id}'."
                )

            # Create the step
            step_schema = StepRunSchema.from_request(step_run)
            session.add(step_schema)

            # Add logs entry for the step if exists.
            # NOTE(review): `step_schema.id` is referenced before the commit,
            # so this relies on the schema ID being generated client-side at
            # construction time rather than by the database — confirm.
            if step_run.logs is not None:
                log_entry = LogsSchema(
                    uri=step_run.logs.uri,
                    step_run_id=step_schema.id,
                    artifact_store_id=step_run.logs.artifact_store_id,
                )
                session.add(log_entry)

            # Save parent step IDs into the database.
            for parent_step_id in step_run.parent_step_ids:
                self._set_run_step_parent_step(
                    child_id=step_schema.id,
                    parent_id=parent_step_id,
                    session=session,
                )

            # Save input artifact IDs into the database.
            for input_name, artifact_id in step_run.inputs.items():
                self._set_run_step_input_artifact(
                    run_step_id=step_schema.id,
                    artifact_id=artifact_id,
                    name=input_name,
                    session=session,
                )

            # Save output artifact IDs into the database.
            for output_name, artifact_id in step_run.outputs.items():
                self._set_run_step_output_artifact(
                    step_run_id=step_schema.id,
                    artifact_id=artifact_id,
                    name=output_name,
                    session=session,
                )

            # All of the above inserts are committed atomically.
            session.commit()

            return self._run_step_schema_to_model(step_schema)

    def _set_run_step_parent_step(
        self, child_id: UUID, parent_id: UUID, session: Session
    ) -> None:
        """Record a parent/child relationship between two step runs.

        The assignment is idempotent: if the link is already recorded, the
        call is a no-op. Changes are added to the session but not committed.

        Args:
            child_id: The ID of the child step run to set the parent for.
            parent_id: The ID of the parent step run to set a child for.
            session: The database session to use.

        Raises:
            KeyError: if the child step run or parent step run doesn't exist.
        """
        # Both endpoints of the link must exist.
        if (
            session.exec(
                select(StepRunSchema).where(StepRunSchema.id == child_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set parent step for step with ID "
                f"{child_id}: No step with this ID found."
            )

        if (
            session.exec(
                select(StepRunSchema).where(StepRunSchema.id == parent_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set parent step for step with ID "
                f"{child_id}: No parent step with ID {parent_id} "
                "found."
            )

        # Skip if the relationship is already recorded.
        existing_link = session.exec(
            select(StepRunParentsSchema)
            .where(StepRunParentsSchema.child_id == child_id)
            .where(StepRunParentsSchema.parent_id == parent_id)
        ).first()
        if existing_link is None:
            session.add(
                StepRunParentsSchema(child_id=child_id, parent_id=parent_id)
            )

    def _set_run_step_input_artifact(
        self, run_step_id: UUID, artifact_id: UUID, name: str, session: Session
    ) -> None:
        """Link an artifact as a named input of a step run.

        The link is idempotent: if the exact (step, artifact, name) triple is
        already recorded, the call is a no-op. Changes are added to the
        session but not committed.

        Args:
            run_step_id: The ID of the step run.
            artifact_id: The ID of the artifact.
            name: The name of the input in the step run.
            session: The database session to use.

        Raises:
            KeyError: if the step run or artifact doesn't exist.
        """
        # Both the step run and the artifact must exist.
        if (
            session.exec(
                select(StepRunSchema).where(StepRunSchema.id == run_step_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set input artifact: No step run with ID "
                f"'{run_step_id}' found."
            )

        if (
            session.exec(
                select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set input artifact: No artifact with ID "
                f"'{artifact_id}' found."
            )

        # Skip if the link is already recorded.
        existing_link = session.exec(
            select(StepRunInputArtifactSchema)
            .where(StepRunInputArtifactSchema.step_id == run_step_id)
            .where(StepRunInputArtifactSchema.artifact_id == artifact_id)
            .where(StepRunInputArtifactSchema.name == name)
        ).first()
        if existing_link is None:
            session.add(
                StepRunInputArtifactSchema(
                    step_id=run_step_id, artifact_id=artifact_id, name=name
                )
            )

    def _set_run_step_output_artifact(
        self,
        step_run_id: UUID,
        artifact_id: UUID,
        name: str,
        session: Session,
    ) -> None:
        """Link an artifact as a named output of a step run.

        The link is idempotent on the (step, artifact) pair — unlike the
        input-artifact counterpart, the duplicate check does not include
        `name`. Changes are added to the session but not committed.

        Args:
            step_run_id: The ID of the step run.
            artifact_id: The ID of the artifact.
            name: The name of the output in the step run.
            session: The database session to use.

        Raises:
            KeyError: if the step run or artifact doesn't exist.
        """
        # Both the step run and the artifact must exist.
        if (
            session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_run_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set output artifact: No step run with ID "
                f"'{step_run_id}' found."
            )

        if (
            session.exec(
                select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
            ).first()
            is None
        ):
            raise KeyError(
                f"Unable to set output artifact: No artifact with ID "
                f"'{artifact_id}' found."
            )

        # Skip if the link is already recorded (keyed on step + artifact).
        existing_link = session.exec(
            select(StepRunOutputArtifactSchema)
            .where(StepRunOutputArtifactSchema.step_id == step_run_id)
            .where(StepRunOutputArtifactSchema.artifact_id == artifact_id)
        ).first()
        if existing_link is None:
            session.add(
                StepRunOutputArtifactSchema(
                    step_id=step_run_id,
                    artifact_id=artifact_id,
                    name=name,
                )
            )

    def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
        """Fetch a single step run by its ID.

        Args:
            step_run_id: The ID of the step run to get.

        Returns:
            The requested step run.

        Raises:
            KeyError: if the step run doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_run_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to get step run with ID {step_run_id}: No step "
                    "run with this ID found."
                )
            return self._run_step_schema_to_model(schema)

    def _run_step_schema_to_model(
        self, step_run: StepRunSchema
    ) -> StepRunResponseModel:
        """Converts a run step schema to a step model.

        Hydrates the model with the step's parent step IDs and with its
        input and output artifacts, which live in separate link tables.

        Args:
            step_run: The run step schema to convert.

        Returns:
            The run step model.
        """
        with Session(self.engine) as session:
            # Get parent steps. The join between `StepRunSchema` and the
            # `StepRunParentsSchema` link table is expressed implicitly via
            # the two equality conditions in the where clauses.
            parent_steps = session.exec(
                select(StepRunSchema)
                .where(StepRunParentsSchema.child_id == step_run.id)
                .where(StepRunParentsSchema.parent_id == StepRunSchema.id)
            ).all()
            parent_step_ids = [parent_step.id for parent_step in parent_steps]

            # Get input artifacts, paired with the input name recorded on the
            # link table (again an implicit join via the where clauses).
            input_artifact_list = session.exec(
                select(
                    ArtifactSchema,
                    StepRunInputArtifactSchema.name,
                )
                .where(
                    ArtifactSchema.id == StepRunInputArtifactSchema.artifact_id
                )
                .where(StepRunInputArtifactSchema.step_id == step_run.id)
            ).all()
            input_artifacts = {
                input_name: self._artifact_schema_to_model(artifact)
                for (artifact, input_name) in input_artifact_list
            }

            # Get output artifacts, keyed by the output name on the link
            # table, mirroring the input-artifact query above.
            output_artifact_list = session.exec(
                select(
                    ArtifactSchema,
                    StepRunOutputArtifactSchema.name,
                )
                .where(
                    ArtifactSchema.id
                    == StepRunOutputArtifactSchema.artifact_id
                )
                .where(StepRunOutputArtifactSchema.step_id == step_run.id)
            ).all()
            output_artifacts = {
                output_name: self._artifact_schema_to_model(artifact)
                for (artifact, output_name) in output_artifact_list
            }

            # Convert to model.
            return step_run.to_model(
                parent_step_ids=parent_step_ids,
                input_artifacts=input_artifacts,
                output_artifacts=output_artifacts,
            )

    def list_run_steps(
        self, step_run_filter_model: StepRunFilterModel
    ) -> Page[StepRunResponseModel]:
        """List all step runs matching the given filter criteria.

        Args:
            step_run_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of step runs matching the filter criteria.
        """
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(StepRunSchema),
                table=StepRunSchema,
                filter_model=step_run_filter_model,
                custom_schema_to_model_conversion=self._run_step_schema_to_model,
            )

    def update_run_step(
        self,
        step_run_id: UUID,
        step_run_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Updates a step run.

        Only the step's own fields and its output artifacts can change;
        input artifacts and parent steps are fixed at creation time.

        Args:
            step_run_id: The ID of the step to update.
            step_run_update: The update to be applied to the step.

        Returns:
            The updated step run.

        Raises:
            KeyError: if the step run doesn't exist.
        """
        with Session(self.engine) as session:
            # The step must exist before it can be updated.
            step_schema = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_run_id)
            ).first()
            if step_schema is None:
                raise KeyError(
                    f"Unable to update step with ID {step_run_id}: "
                    f"No step with this ID found."
                )

            # Apply the field updates.
            step_schema.update(step_run_update)
            session.add(step_schema)

            # Register any new output artifacts.
            for output_name, output_artifact_id in step_run_update.outputs.items():
                self._set_run_step_output_artifact(
                    step_run_id=step_run_id,
                    artifact_id=output_artifact_id,
                    name=output_name,
                    session=session,
                )

            session.commit()
            session.refresh(step_schema)

            return self._run_step_schema_to_model(step_schema)

    # ---------
    # Artifacts
    # ---------

    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Creates an artifact.

        Also persists any visualizations attached to the artifact request.

        Args:
            artifact: The artifact to create.

        Returns:
            The created artifact.
        """
        with Session(self.engine) as session:
            schema = ArtifactSchema.from_request(artifact)
            session.add(schema)

            # Persist the artifact's visualizations, if any.
            for visualization in artifact.visualizations or []:
                session.add(
                    ArtifactVisualizationSchema.from_model(
                        visualization=visualization, artifact_id=schema.id
                    )
                )

            session.commit()
            return self._artifact_schema_to_model(schema)

    def _artifact_schema_to_model(
        self, artifact_schema: ArtifactSchema
    ) -> ArtifactResponseModel:
        """Converts an artifact schema to a model.

        Args:
            artifact_schema: The artifact schema to convert.

        Returns:
            The converted artifact model.
        """
        # Find the producer step run ID: the first step run that lists this
        # artifact among its outputs and was not cached. Cached runs are
        # excluded — presumably because a cached step reuses an artifact
        # rather than producing it; verify against the caching logic.
        with Session(self.engine) as session:
            producer_step_run_id = session.exec(
                select(StepRunOutputArtifactSchema.step_id)
                .where(
                    StepRunOutputArtifactSchema.artifact_id
                    == artifact_schema.id
                )
                # Implicit join between the output link table and the step
                # run table via the equality condition below.
                .where(StepRunOutputArtifactSchema.step_id == StepRunSchema.id)
                .where(StepRunSchema.status != ExecutionStatus.CACHED)
            ).first()

            # Convert the artifact schema to a model.
            return artifact_schema.to_model(
                producer_step_run_id=producer_step_run_id
            )

    def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
        """Gets an artifact.

        Args:
            artifact_id: The ID of the artifact to get.

        Returns:
            The artifact.

        Raises:
            KeyError: if the artifact doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to get artifact with ID {artifact_id}: "
                    f"No artifact with this ID found."
                )
            return self._artifact_schema_to_model(schema)

    def list_artifacts(
        self, artifact_filter_model: ArtifactFilterModel
    ) -> Page[ArtifactResponseModel]:
        """List all artifacts matching the given filter criteria.

        Args:
            artifact_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of artifacts matching the filter criteria.
        """
        with Session(self.engine) as session:
            query = select(ArtifactSchema)
            if artifact_filter_model.only_unused:
                # Keep only artifacts that appear neither as a step output
                # nor as a step input.
                used_as_output = select(StepRunOutputArtifactSchema.artifact_id)
                used_as_input = select(StepRunInputArtifactSchema.artifact_id)
                query = query.where(
                    ArtifactSchema.id.notin_(used_as_output)  # type: ignore[attr-defined]
                ).where(
                    ArtifactSchema.id.notin_(used_as_input)  # type: ignore[attr-defined]
                )
            return self.filter_and_paginate(
                session=session,
                query=query,
                table=ArtifactSchema,
                filter_model=artifact_filter_model,
                custom_schema_to_model_conversion=self._artifact_schema_to_model,
            )

    def delete_artifact(self, artifact_id: UUID) -> None:
        """Deletes an artifact.

        Args:
            artifact_id: The ID of the artifact to delete.

        Raises:
            KeyError: if the artifact doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to delete artifact with ID {artifact_id}: "
                    f"No artifact with this ID found."
                )
            session.delete(schema)
            session.commit()

    # ------------
    # Run Metadata
    # ------------

    def create_run_metadata(
        self, run_metadata: RunMetadataRequestModel
    ) -> RunMetadataResponseModel:
        """Creates run metadata.

        Args:
            run_metadata: The run metadata to create.

        Returns:
            The created run metadata.
        """
        with Session(self.engine) as session:
            schema = RunMetadataSchema.from_request(run_metadata)
            session.add(schema)
            session.commit()
            return schema.to_model()

    def list_run_metadata(
        self,
        run_metadata_filter_model: RunMetadataFilterModel,
    ) -> Page[RunMetadataResponseModel]:
        """List run metadata.

        Args:
            run_metadata_filter_model: All filter parameters including
                pagination params.

        Returns:
            A page of run metadata matching the filter criteria.
        """
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(RunMetadataSchema),
                table=RunMetadataSchema,
                filter_model=run_metadata_filter_model,
            )

    # -----------------
    # Code Repositories
    # -----------------

    def create_code_repository(
        self, code_repository: CodeRepositoryRequestModel
    ) -> CodeRepositoryResponseModel:
        """Creates a new code repository.

        Args:
            code_repository: Code repository to be created.

        Returns:
            The newly created code repository.

        Raises:
            EntityExistsError: If a code repository with the given name already
                exists.
        """
        with Session(self.engine) as session:
            # Enforce name uniqueness within the workspace.
            duplicate = session.exec(
                select(CodeRepositorySchema)
                .where(CodeRepositorySchema.name == code_repository.name)
                .where(
                    CodeRepositorySchema.workspace_id
                    == code_repository.workspace
                )
            ).first()
            if duplicate is not None:
                raise EntityExistsError(
                    f"Unable to create code repository in workspace "
                    f"'{code_repository.workspace}': A code repository with "
                    "this name already exists."
                )

            schema = CodeRepositorySchema.from_request(code_repository)
            session.add(schema)
            session.commit()
            session.refresh(schema)

            return schema.to_model()

    def get_code_repository(
        self, code_repository_id: UUID
    ) -> CodeRepositoryResponseModel:
        """Gets a specific code repository.

        Args:
            code_repository_id: The ID of the code repository to get.

        Returns:
            The requested code repository, if it was found.

        Raises:
            KeyError: If no code repository with the given ID exists.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(CodeRepositorySchema).where(
                    CodeRepositorySchema.id == code_repository_id
                )
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to get code repository with ID "
                    f"'{code_repository_id}': No code repository with this "
                    "ID found."
                )

            return schema.to_model()

    def list_code_repositories(
        self, filter_model: CodeRepositoryFilterModel
    ) -> Page[CodeRepositoryResponseModel]:
        """List all code repositories.

        Args:
            filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all code repositories.
        """
        with Session(self.engine) as session:
            return self.filter_and_paginate(
                session=session,
                query=select(CodeRepositorySchema),
                table=CodeRepositorySchema,
                filter_model=filter_model,
            )

    def update_code_repository(
        self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
    ) -> CodeRepositoryResponseModel:
        """Updates an existing code repository.

        Args:
            code_repository_id: The ID of the code repository to update.
            update: The update to be applied to the code repository.

        Returns:
            The updated code repository.

        Raises:
            KeyError: If no code repository with the given name exists.
        """
        with Session(self.engine) as session:
            # The repository must exist before it can be updated.
            schema = session.exec(
                select(CodeRepositorySchema).where(
                    CodeRepositorySchema.id == code_repository_id
                )
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to update code repository with ID "
                    f"{code_repository_id}: No code repository with this ID "
                    "found."
                )

            # Apply the update and persist it.
            schema.update(update)
            session.add(schema)
            session.commit()

            return schema.to_model()

    def delete_code_repository(self, code_repository_id: UUID) -> None:
        """Deletes a code repository.

        Args:
            code_repository_id: The ID of the code repository to delete.

        Raises:
            KeyError: If no code repository with the given ID exists.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(CodeRepositorySchema).where(
                    CodeRepositorySchema.id == code_repository_id
                )
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to delete code repository with ID "
                    f"{code_repository_id}: No code repository with this ID "
                    "found."
                )

            session.delete(schema)
            session.commit()

    # ------------------
    # Service Connectors
    # ------------------

    @staticmethod
    def _fail_if_service_connector_with_name_exists_for_user(
        name: str,
        workspace_id: UUID,
        user_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a service connector with same name exists.

        Args:
            name: The name of the service connector
            workspace_id: The ID of the workspace
            user_id: The ID of the user
            session: The Session

        Returns:
            None

        Raises:
            EntityExistsError: If a service connector with the given name is
                already owned by the user
        """
        assert user_id
        # Check if a service connector with the same domain key (name,
        # workspace, owner) already exists
        existing_domain_connector = session.exec(
            select(ServiceConnectorSchema)
            .where(ServiceConnectorSchema.name == name)
            .where(ServiceConnectorSchema.workspace_id == workspace_id)
            .where(ServiceConnectorSchema.user_id == user_id)
        ).first()
        if existing_domain_connector is not None:
            # Theoretically the user schema is optional; here it can never be
            # None because the connector was matched on `user_id` above.
            assert existing_domain_connector.user
            # Fixed: the user name was missing its opening quote in the
            # original message ("...user, Jane'." instead of "...user, 'Jane'.").
            raise EntityExistsError(
                f"Unable to register service connector with name '{name}': "
                "Found an existing service connector with the same name in the "
                f"same workspace, '{existing_domain_connector.workspace.name}', "
                "owned by the same user, "
                f"'{existing_domain_connector.user.name}'."
            )
        return None

    @staticmethod
    def _fail_if_service_connector_with_name_already_shared(
        name: str,
        workspace_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a service connector with same name is already shared.

        Args:
            name: The name of the service connector
            workspace_id: The ID of the workspace
            session: The Session

        Raises:
            EntityExistsError: If a service connector with the given name is
                already shared by another user
        """
        # Look for a shared connector with the same name in the workspace.
        shared_flag = True
        conflicting_connector = session.exec(
            select(ServiceConnectorSchema)
            .where(ServiceConnectorSchema.name == name)
            .where(ServiceConnectorSchema.workspace_id == workspace_id)
            .where(ServiceConnectorSchema.is_shared == shared_flag)
        ).first()
        if conflicting_connector is not None:
            raise EntityExistsError(
                f"Unable to share service connector with name '{name}': Found "
                "an existing shared service connector with the same name in "
                f"workspace '{workspace_id}'."
            )

    def _create_connector_secret(
        self,
        connector_name: str,
        user: UUID,
        workspace: UUID,
        is_shared: bool,
        secrets: Optional[Dict[str, Optional[SecretStr]]],
    ) -> Optional[UUID]:
        """Creates a new secret to store the service connector secret credentials.

        Args:
            connector_name: The name of the service connector for which to
                create a secret.
            user: The ID of the user who owns the service connector.
            workspace: The ID of the workspace in which the service connector
                is registered.
            is_shared: Whether the service connector is shared.
            secrets: The secret credentials to store.

        Returns:
            The ID of the newly created secret or None, if the service connector
            does not contain any secret credentials.

        Raises:
            NotImplementedError: If a secrets store is not configured or
                supported.
        """
        if not secrets:
            return None

        if not self.secrets_store:
            raise NotImplementedError(
                "A secrets store is not configured or supported."
            )

        # Some secrets store backends require the secret name to be a valid
        # DNS subdomain, so strip everything that is not alphanumeric or a
        # dash.
        sanitized_name = re.sub(r"[^a-zA-Z0-9-]", "-", connector_name)

        # Keep generating randomly suffixed candidate names until one is
        # free. Even then, the store may race us: create_secret raises
        # KeyError when the name was taken in the meantime, and we retry.
        while True:
            candidate = f"connector-{sanitized_name}-{random_str(4)}"
            taken = self.secrets_store.list_secrets(
                SecretFilterModel(
                    name=candidate,
                )
            ).size
            if taken:
                continue
            scope = SecretScope.WORKSPACE if is_shared else SecretScope.USER
            try:
                created = self.secrets_store.create_secret(
                    SecretRequestModel(
                        name=candidate,
                        user=user,
                        workspace=workspace,
                        scope=scope,
                        values=secrets,
                    )
                )
            except KeyError:
                # The secret already exists, try again
                continue
            return created.id

    def _populate_connector_type(
        self, *service_connectors: ServiceConnectorResponseModel
    ) -> None:
        """Populates the connector type of the given service connectors.

        If the connector type is not locally available, the connector type
        field is left as is.

        Args:
            service_connectors: The service connectors to populate.
        """
        registry = service_connector_registry
        for connector in service_connectors:
            # Only connectors whose type is registered locally can be
            # enriched with the full connector type model.
            if registry.is_registered(connector.type):
                connector.connector_type = (
                    registry.get_service_connector_type(connector.type)
                )

    @track(AnalyticsEvent.CREATED_SERVICE_CONNECTOR, v1=False, v2=True)
    def create_service_connector(
        self, service_connector: ServiceConnectorRequestModel
    ) -> ServiceConnectorResponseModel:
        """Creates a new service connector.

        If the connector type is registered locally, the request is first
        validated against the connector type schema. Secret credentials are
        stored in the secrets store and only the resulting secret ID is
        persisted with the connector row; if persisting the connector fails,
        the freshly created secret is deleted again on a best-effort basis.

        Args:
            service_connector: Service connector to be created.

        Returns:
            The newly created service connector.

        Raises:
            Exception: If anything goes wrong during the creation of the
                service connector.
        """
        # If the connector type is locally available, we validate the request
        # against the connector type schema before storing it in the database
        if service_connector_registry.is_registered(service_connector.type):
            connector_type = (
                service_connector_registry.get_service_connector_type(
                    service_connector.type
                )
            )
            service_connector.validate_and_configure_resources(
                connector_type=connector_type,
                resource_types=service_connector.resource_types,
                resource_id=service_connector.resource_id,
                configuration=service_connector.configuration,
                secrets=service_connector.secrets,
            )

        with Session(self.engine) as session:
            # Enforce per-user name uniqueness within the workspace.
            self._fail_if_service_connector_with_name_exists_for_user(
                name=service_connector.name,
                user_id=service_connector.user,
                workspace_id=service_connector.workspace,
                session=session,
            )

            if service_connector.is_shared:
                # Shared connectors must also be unique by name across the
                # whole workspace, regardless of owner.
                self._fail_if_service_connector_with_name_already_shared(
                    name=service_connector.name,
                    workspace_id=service_connector.workspace,
                    session=session,
                )

            # Create the secret first, so its ID can be stored on the
            # connector row.
            secret_id = self._create_connector_secret(
                connector_name=service_connector.name,
                user=service_connector.user,
                workspace=service_connector.workspace,
                is_shared=service_connector.is_shared,
                secrets=service_connector.secrets,
            )
            try:
                # Create the service connector
                new_service_connector = ServiceConnectorSchema.from_request(
                    service_connector,
                    secret_id=secret_id,
                )

                session.add(new_service_connector)
                session.commit()

                session.refresh(new_service_connector)
            except Exception:
                # Roll back the secret creation if the DB insert failed, so
                # no orphaned secret is left behind.
                if secret_id and self.secrets_store:
                    try:
                        self.secrets_store.delete_secret(secret_id)
                    except Exception:
                        # Ignore any errors that occur while deleting the
                        # secret
                        pass

                raise

            connector = new_service_connector.to_model()
            # Attach the full connector type model when locally available.
            self._populate_connector_type(connector)
            return connector

    def get_service_connector(
        self, service_connector_id: UUID
    ) -> ServiceConnectorResponseModel:
        """Gets a specific service connector.

        Args:
            service_connector_id: The ID of the service connector to get.

        Returns:
            The requested service connector, if it was found.

        Raises:
            KeyError: If no service connector with the given ID exists.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(ServiceConnectorSchema).where(
                    ServiceConnectorSchema.id == service_connector_id
                )
            ).first()

            if schema is None:
                raise KeyError(
                    f"Service connector with ID {service_connector_id} not "
                    "found."
                )

            model = schema.to_model()
            # Enrich with the full connector type model when available.
            self._populate_connector_type(model)
            return model

    def _list_filtered_service_connectors(
        self,
        session: Session,
        query: Union[
            Select[ServiceConnectorSchema],
            SelectOfScalar[ServiceConnectorSchema],
        ],
        filter_model: ServiceConnectorFilterModel,
    ) -> List[ServiceConnectorSchema]:
        """Refine a service connector query.

        Applies resource type and label filters to the query results in
        Python, since these attributes are not directly filterable in SQL.

        Args:
            session: The database session.
            query: The query to filter.
            filter_model: The filter model.

        Returns:
            The filtered list of service connectors.
        """
        results: List[ServiceConnectorSchema] = (
            session.exec(query).unique().all()
        )

        # Keep only connectors that support the requested resource type.
        wanted_resource = filter_model.resource_type
        if wanted_resource:
            results = [
                schema
                for schema in results
                if wanted_resource in schema.resource_types_list
            ]

        # Keep only connectors that carry all the requested labels.
        wanted_labels = filter_model.labels
        if wanted_labels:
            results = [
                schema for schema in results if schema.has_labels(wanted_labels)
            ]

        return results

    def list_service_connectors(
        self, filter_model: ServiceConnectorFilterModel
    ) -> Page[ServiceConnectorResponseModel]:
        """List all service connectors.

        Args:
            filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all service connectors.
        """

        def fetch_connectors(
            session: Session,
            query: Union[
                Select[ServiceConnectorSchema],
                SelectOfScalar[ServiceConnectorSchema],
            ],
            filter_model: BaseFilterModel,
        ) -> List[ServiceConnectorSchema]:
            """Custom fetch function for connector filtering and pagination.

            Delegates the resource type and label filtering to
            `_list_filtered_service_connectors`.

            Args:
                session: The database session.
                query: The query to filter.
                filter_model: The filter model.

            Returns:
                The filtered and paginated results.
            """
            assert isinstance(filter_model, ServiceConnectorFilterModel)
            return self._list_filtered_service_connectors(
                session=session, query=query, filter_model=filter_model
            )

        with Session(self.engine) as session:
            page: Page[ServiceConnectorResponseModel] = (
                self.filter_and_paginate(
                    session=session,
                    query=select(ServiceConnectorSchema),
                    table=ServiceConnectorSchema,
                    filter_model=filter_model,
                    custom_fetch=fetch_connectors,
                )
            )

            # Enrich every returned connector with its locally registered
            # connector type, where available.
            self._populate_connector_type(*page.items)
            return page

    def _update_connector_secret(
        self,
        existing_connector: ServiceConnectorResponseModel,
        updated_connector: ServiceConnectorUpdateModel,
    ) -> Optional[UUID]:
        """Updates the secret for a service connector.

        If the secrets field in the service connector update is set (i.e. not
        None), the existing secret, if any, is replaced. If the secrets field is
        set to an empty dict, the existing secret is deleted.

        Args:
            existing_connector: Existing service connector for which to update a
                secret.
            updated_connector: Updated service connector.

        Returns:
            The ID of the updated secret or None, if the new service connector
            does not contain any secret credentials.

        Raises:
            NotImplementedError: If a secrets store is not configured or
                supported.
        """
        if not self.secrets_store:
            raise NotImplementedError(
                "A secrets store is not configured or supported."
            )

        # The effective sharing state after the update: `None` in the update
        # means "leave unchanged".
        is_shared = (
            existing_connector.is_shared
            if updated_connector.is_shared is None
            else updated_connector.is_shared
        )
        scope_changed = is_shared != existing_connector.is_shared

        if updated_connector.secrets is None:
            if scope_changed and existing_connector.secret_id:
                # The secret values are untouched, but the secret's scope must
                # follow the connector's new sharing state.
                self.secrets_store.update_secret(
                    secret_id=existing_connector.secret_id,
                    secret_update=SecretUpdateModel(  # type: ignore[call-arg]
                        scope=SecretScope.WORKSPACE
                        if is_shared
                        else SecretScope.USER,
                    ),
                )

            # If the connector update does not contain a secrets update, keep
            # the existing secret (if any)
            return existing_connector.secret_id

        # Delete the existing secret (if any), to be replaced by the new secret
        if existing_connector.secret_id:
            try:
                self.secrets_store.delete_secret(existing_connector.secret_id)
            except KeyError:
                # Ignore if the secret no longer exists
                pass

        # If the new service connector does not contain any secret credentials,
        # return None
        if not updated_connector.secrets:
            return None

        assert existing_connector.user is not None
        # A secret does not exist yet, create a new one
        return self._create_connector_secret(
            connector_name=updated_connector.name or existing_connector.name,
            user=existing_connector.user.id,
            workspace=existing_connector.workspace.id,
            is_shared=is_shared,
            secrets=updated_connector.secrets,
        )

    def update_service_connector(
        self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
    ) -> ServiceConnectorResponseModel:
        """Updates an existing service connector.

        The update model contains the fields to be updated. If a field value is
        set to None in the model, the field is not updated, but there are
        special rules concerning some fields:

        * the `configuration` and `secrets` fields together represent a full
        valid configuration update, not just a partial update. If either is
        set (i.e. not None) in the update, their values are merged together and
        will replace the existing configuration and secrets values.
        * the `resource_id` field value is also a full replacement value: if set
        to `None`, the resource ID is removed from the service connector.
        * the `expiration_seconds` field value is also a full replacement value:
        if set to `None`, the expiration is removed from the service connector.
        * the `secret_id` field value in the update is ignored, given that
        secrets are managed internally by the ZenML store.
        * the `labels` field is also a full labels update: if set (i.e. not
        `None`), all existing labels are removed and replaced by the new labels
        in the update.

        Args:
            service_connector_id: The ID of the service connector to update.
            update: The update to be applied to the service connector.

        Returns:
            The updated service connector.

        Raises:
            KeyError: If no service connector with the given ID exists.
            IllegalOperationError: If the service connector is referenced by
                one or more stack components and the update would change the
                connector type, resource type or resource ID.
        """
        with Session(self.engine) as session:
            existing_connector = session.exec(
                select(ServiceConnectorSchema).where(
                    ServiceConnectorSchema.id == service_connector_id
                )
            ).first()

            if existing_connector is None:
                raise KeyError(
                    f"Unable to update service connector with ID "
                    f"'{service_connector_id}': Found no existing service "
                    "connector with this ID."
                )

            # In case of a renaming update, make sure no service connector uses
            # that name already
            if update.name:
                if (
                    existing_connector.name != update.name
                    and existing_connector.user_id is not None
                ):
                    self._fail_if_service_connector_with_name_exists_for_user(
                        name=update.name,
                        workspace_id=existing_connector.workspace_id,
                        user_id=existing_connector.user_id,
                        session=session,
                    )

            # Check if service connector update makes the service connector a
            # shared service connector
            # In that case, check if a service connector with the same name is
            # already shared within the workspace
            if update.is_shared is not None:
                if not existing_connector.is_shared and update.is_shared:
                    self._fail_if_service_connector_with_name_already_shared(
                        name=update.name or existing_connector.name,
                        workspace_id=existing_connector.workspace_id,
                        session=session,
                    )

            existing_connector_model = existing_connector.to_model()

            if len(existing_connector.components):
                # If the service connector is already used in one or more
                # stack components, the update is no longer allowed to change
                # the service connector's authentication method, connector type,
                # resource type, or resource ID
                # NOTE(review): the truthiness guard uses `connector_type`
                # while the comparison uses `type` — presumably `type`
                # normalizes the connector type to its string identifier;
                # confirm against the update model definition.
                if (
                    update.connector_type
                    and update.type != existing_connector_model.connector_type
                ):
                    raise IllegalOperationError(
                        "The service type of a service connector that is "
                        "already actively used in one or more stack components "
                        "cannot be changed."
                    )

                if (
                    update.auth_method
                    and update.auth_method
                    != existing_connector_model.auth_method
                ):
                    raise IllegalOperationError(
                        "The authentication method of a service connector that "
                        "is already actively used in one or more stack "
                        "components cannot be changed."
                    )

                if (
                    update.resource_types
                    and update.resource_types
                    != existing_connector_model.resource_types
                ):
                    raise IllegalOperationError(
                        "The resource type of a service connector that is "
                        "already actively used in one or more stack components "
                        "cannot be changed."
                    )

                # The resource ID field cannot be used as a partial update: if
                # set to None, the existing resource ID is also removed
                if update.resource_id != existing_connector_model.resource_id:
                    raise IllegalOperationError(
                        "The resource ID of a service connector that is "
                        "already actively used in one or more stack components "
                        "cannot be changed."
                    )

            # If the connector type is locally available, we validate the update
            # against the connector type schema before storing it in the
            # database
            if service_connector_registry.is_registered(
                existing_connector.connector_type
            ):
                connector_type = (
                    service_connector_registry.get_service_connector_type(
                        existing_connector.connector_type
                    )
                )
                # We need the auth method to be set to be able to validate the
                # configuration
                update.auth_method = (
                    update.auth_method or existing_connector_model.auth_method
                )
                # Validate the configuration update. If the configuration or
                # secrets fields are set, together they are merged into a
                # full configuration that is validated against the connector
                # type schema and replaces the existing configuration and
                # secrets values
                update.validate_and_configure_resources(
                    connector_type=connector_type,
                    resource_types=update.resource_types,
                    resource_id=update.resource_id,
                    configuration=update.configuration,
                    secrets=update.secrets,
                )

            # Update secret: replaces/removes/keeps the stored secret
            # according to the update's `secrets` field and sharing state.
            secret_id = self._update_connector_secret(
                existing_connector=existing_connector_model,
                updated_connector=update,
            )

            existing_connector.update(
                connector_update=update, secret_id=secret_id
            )
            session.add(existing_connector)
            session.commit()

            connector = existing_connector.to_model()
            self._populate_connector_type(connector)
            return connector

    def delete_service_connector(self, service_connector_id: UUID) -> None:
        """Deletes a service connector.

        Also deletes the associated secret from the secrets store, on a
        best-effort basis.

        Args:
            service_connector_id: The ID of the service connector to delete.

        Raises:
            KeyError: If no service connector with the given ID exists.
            IllegalOperationError: If the service connector is still referenced
                by one or more stack components.
        """
        with Session(self.engine) as session:
            try:
                service_connector = session.exec(
                    select(ServiceConnectorSchema).where(
                        ServiceConnectorSchema.id == service_connector_id
                    )
                ).one()
            except NoResultFound as error:
                # `.one()` raises NoResultFound when no row matches, so a
                # separate None check is not needed. Surface a descriptive
                # KeyError (the original raised a message-less KeyError).
                raise KeyError(
                    f"Service connector with ID {service_connector_id} not "
                    "found."
                ) from error

            if len(service_connector.components) > 0:
                raise IllegalOperationError(
                    f"Service connector with ID {service_connector_id} "
                    f"cannot be deleted as it is still referenced by "
                    f"{len(service_connector.components)} "
                    "stack components. Before deleting this service "
                    "connector, make sure to remove it from all stack "
                    "components."
                )

            session.delete(service_connector)

            # Best-effort cleanup of the associated secret: a secret that is
            # already gone is not an error.
            if service_connector.secret_id and self.secrets_store:
                try:
                    self.secrets_store.delete_secret(
                        service_connector.secret_id
                    )
                except KeyError:
                    pass

            session.commit()

    def verify_service_connector_config(
        self,
        service_connector: ServiceConnectorRequestModel,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Verifies if a service connector configuration has access to resources.

        Args:
            service_connector: The service connector configuration to verify.
            list_resources: If True, the list of all resources accessible
                through the service connector is returned.

        Returns:
            The list of resources that the service connector configuration has
            access to.
        """
        # Build a live connector from the configuration and let it probe
        # its resources.
        connector = service_connector_registry.instantiate_connector(
            model=service_connector
        )
        return connector.verify(list_resources=list_resources)

    def verify_service_connector(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Verifies if a service connector instance has access to one or more resources.

        Args:
            service_connector_id: The ID of the service connector to verify.
            resource_type: The type of resource to verify access to.
            resource_id: The ID of the resource to verify access to.
            list_resources: If True, the list of all resources accessible
                through the service connector and matching the supplied resource
                type and ID are returned.

        Returns:
            The list of resources that the service connector has access to,
            scoped to the supplied resource type and ID, if provided.
        """
        # Load the stored connector, instantiate it and delegate the actual
        # verification to the connector implementation.
        stored_model = self.get_service_connector(service_connector_id)
        instance = service_connector_registry.instantiate_connector(
            model=stored_model
        )
        return instance.verify(
            resource_type=resource_type,
            resource_id=resource_id,
            list_resources=list_resources,
        )

    def get_service_connector_client(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> ServiceConnectorResponseModel:
        """Get a service connector client for a service connector and given resource.

        Args:
            service_connector_id: The ID of the base service connector to use.
            resource_type: The type of resource to get a client for.
            resource_id: The ID of the resource to get a client for.

        Returns:
            A service connector client that can be used to access the given
            resource.
        """
        base_model = self.get_service_connector(service_connector_id)

        instance = service_connector_registry.instantiate_connector(
            model=base_model
        )

        # Derive a client connector scoped to the requested resource.
        client = instance.get_connector_client(
            resource_type=resource_type,
            resource_id=resource_id,
        )

        # Convert the client back to a response model, carrying over the
        # base connector's ownership and metadata.
        client_model = client.to_response_model(
            user=base_model.user,
            workspace=base_model.workspace,
            is_shared=base_model.is_shared,
            description=base_model.description,
            labels=base_model.labels,
        )
        self._populate_connector_type(client_model)
        return client_model

    def list_service_connector_resources(
        self,
        user_name_or_id: Union[str, UUID],
        workspace_name_or_id: Union[str, UUID],
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> List[ServiceConnectorResourcesModel]:
        """List resources that can be accessed by service connectors.

        Considers both the connectors shared within the workspace and the
        private connectors owned by the given user.

        Args:
            user_name_or_id: The name or ID of the user to scope to.
            workspace_name_or_id: The name or ID of the workspace to scope to.
            connector_type: The type of service connector to scope to.
            resource_type: The type of resource to scope to.
            resource_id: The ID of the resource to scope to.

        Returns:
            The matching list of resources that available service
            connectors have access to.
        """
        user = self.get_user(user_name_or_id)
        workspace = self.get_workspace(workspace_name_or_id)
        # First pass: connectors shared within the workspace.
        connector_filter_model = ServiceConnectorFilterModel(
            connector_type=connector_type,
            resource_type=resource_type,
            is_shared=True,
            workspace_id=workspace.id,
        )

        shared_connectors = self.list_service_connectors(
            filter_model=connector_filter_model
        ).items

        # Second pass: the user's own private connectors.
        connector_filter_model = ServiceConnectorFilterModel(
            connector_type=connector_type,
            resource_type=resource_type,
            is_shared=False,
            user_id=user.id,
            workspace_id=workspace.id,
        )

        private_connectors = self.list_service_connectors(
            filter_model=connector_filter_model
        ).items

        resource_list: List[ServiceConnectorResourcesModel] = []

        # For connectors whose type is available locally, we instantiate
        # them and actively discover the resources that they have access to.
        # For those that are not locally available, we only return rudimentary
        # information extracted from the connector model without actively
        # trying to discover resources.
        for connector in list(shared_connectors) + list(private_connectors):
            if not service_connector_registry.is_registered(connector.type):
                if resource_id and connector.resource_id != resource_id:
                    # If an explicit resource ID is required, the connector
                    # has to be configured with it.
                    continue

                resources = (
                    ServiceConnectorResourcesModel.from_connector_model(
                        connector,
                        resource_type=resource_type,
                    )
                )
                # Mark resources that could not be enumerated as errored, so
                # callers know the type is unavailable locally.
                for r in resources.resources:
                    if not r.resource_ids:
                        r.error = (
                            f"The service '{connector.type}' connector type is "
                            "not available."
                        )

            else:
                try:
                    connector_instance = (
                        service_connector_registry.instantiate_connector(
                            model=connector
                        )
                    )

                    resources = connector_instance.verify(
                        resource_type=resource_type,
                        resource_id=resource_id,
                        list_resources=True,
                    )
                except (ValueError, AuthorizationException) as e:
                    # A connector that cannot be verified is skipped rather
                    # than failing the whole listing.
                    error = (
                        f'Failed to fetch {resource_type or "available"} '
                        f"resources from service connector {connector.name}/"
                        f"{connector.id}: {e}"
                    )
                    # Log an exception if debug logging is enabled
                    if logger.isEnabledFor(logging.DEBUG):
                        logger.exception(error)
                    else:
                        logger.error(error)
                    continue

            resource_list.append(resources)

        return resource_list

    def list_service_connector_types(
        self,
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        auth_method: Optional[str] = None,
    ) -> List[ServiceConnectorTypeModel]:
        """Get a list of service connector types.

        Args:
            connector_type: Filter by connector type.
            resource_type: Filter by resource type.
            auth_method: Filter by authentication method.

        Returns:
            List of service connector types.
        """
        # Connector types live in the in-process registry, not the database.
        registry = service_connector_registry
        return registry.list_service_connector_types(
            connector_type=connector_type,
            resource_type=resource_type,
            auth_method=auth_method,
        )

    def get_service_connector_type(
        self,
        connector_type: str,
    ) -> ServiceConnectorTypeModel:
        """Returns the requested service connector type.

        Args:
            connector_type: the service connector type identifier.

        Returns:
            The requested service connector type.
        """
        # Plain delegation to the in-process connector type registry.
        registry = service_connector_registry
        return registry.get_service_connector_type(connector_type)

    # =======================
    # Internal helper methods
    # =======================
    @staticmethod
    def _get_schema_by_name_or_id(
        object_name_or_id: Union[str, UUID],
        schema_class: Type[AnyNamedSchema],
        schema_name: str,
        session: Session,
    ) -> AnyNamedSchema:
        """Query a schema by its 'name' or 'id' field.

        Args:
            object_name_or_id: The name or ID of the object to query.
            schema_class: The schema class to query. E.g., `WorkspaceSchema`.
            schema_name: The name of the schema used for error messages.
                E.g., "workspace".
            session: The database session to use.

        Returns:
            The schema object.

        Raises:
            KeyError: if the object couldn't be found.
            ValueError: if no object name or ID is provided.
        """
        if object_name_or_id is None:
            raise ValueError(
                f"Unable to get {schema_name}: No {schema_name} ID or name "
                "provided."
            )
        if uuid_utils.is_valid_uuid(object_name_or_id):
            # A valid UUID: match on the primary key.
            filter_params = schema_class.id == object_name_or_id
            error_msg = (
                f"Unable to get {schema_name} with name or ID "
                f"'{object_name_or_id}': No {schema_name} with this ID found."
            )
        else:
            # Not a UUID: fall back to a lookup by name.
            filter_params = schema_class.name == object_name_or_id
            error_msg = (
                f"Unable to get {schema_name} with name or ID "
                f"'{object_name_or_id}': '{object_name_or_id}' is not a valid "
                f"UUID and no {schema_name} with this name exists."
            )

        schema = session.exec(
            select(schema_class).where(filter_params)
        ).first()

        if schema is None:
            raise KeyError(error_msg)
        return schema

    def _get_workspace_schema(
        self,
        workspace_name_or_id: Union[str, UUID],
        session: Session,
    ) -> WorkspaceSchema:
        """Resolve a workspace schema from a name or UUID.

        Thin convenience wrapper used wherever another object needs its
        associated workspace.

        Args:
            workspace_name_or_id: Workspace name or UUID to look up.
            session: Active database session.

        Returns:
            The matching workspace schema.
        """
        return self._get_schema_by_name_or_id(
            workspace_name_or_id, WorkspaceSchema, "workspace", session
        )

    def _get_user_schema(
        self,
        user_name_or_id: Union[str, UUID],
        session: Session,
    ) -> UserSchema:
        """Resolve a user schema from a name or UUID.

        Thin convenience wrapper used wherever another object needs its
        associated user.

        Args:
            user_name_or_id: User name or UUID to look up.
            session: Active database session.

        Returns:
            The matching user schema.
        """
        return self._get_schema_by_name_or_id(
            user_name_or_id, UserSchema, "user", session
        )

    def _get_team_schema(
        self,
        team_name_or_id: Union[str, UUID],
        session: Session,
    ) -> TeamSchema:
        """Resolve a team schema from a name or UUID.

        Thin convenience wrapper for looking up a team by either
        identifier.

        Args:
            team_name_or_id: Team name or UUID to look up.
            session: Active database session.

        Returns:
            The matching team schema.
        """
        return self._get_schema_by_name_or_id(
            team_name_or_id, TeamSchema, "team", session
        )

    def _get_role_schema(
        self,
        role_name_or_id: Union[str, UUID],
        session: Session,
    ) -> RoleSchema:
        """Resolve a role schema from a name or UUID.

        Thin convenience wrapper for looking up a role by either
        identifier.

        Args:
            role_name_or_id: Role name or UUID to look up.
            session: Active database session.

        Returns:
            The matching role schema.
        """
        return self._get_schema_by_name_or_id(
            role_name_or_id, RoleSchema, "role", session
        )

    def _get_run_schema(
        self,
        run_name_or_id: Union[str, UUID],
        session: Session,
    ) -> PipelineRunSchema:
        """Resolve a pipeline run schema from a name or UUID.

        Thin convenience wrapper for looking up a run by either
        identifier.

        Args:
            run_name_or_id: Run name or UUID to look up.
            session: Active database session.

        Returns:
            The matching pipeline run schema.
        """
        return self._get_schema_by_name_or_id(
            run_name_or_id, PipelineRunSchema, "run", session
        )

    def _create_or_reuse_code_reference(
        self,
        session: Session,
        workspace_id: UUID,
        code_reference: Optional["CodeReferenceRequestModel"],
    ) -> Optional[UUID]:
        """Creates or reuses a code reference.

        An existing reference is reused when the workspace, repository,
        commit and subdirectory all match; otherwise a new row is added to
        the session (the caller is responsible for committing).

        Args:
            session: The database session to use.
            workspace_id: ID of the workspace in which the code reference
                should be.
            code_reference: Request of the reference to create.

        Returns:
            The code reference ID, or `None` if no request was given.
        """
        if code_reference is None:
            return None

        # All four columns together identify a unique code reference.
        match = session.exec(
            select(CodeReferenceSchema).where(
                CodeReferenceSchema.workspace_id == workspace_id,
                CodeReferenceSchema.code_repository_id
                == code_reference.code_repository,
                CodeReferenceSchema.commit == code_reference.commit,
                CodeReferenceSchema.subdirectory
                == code_reference.subdirectory,
            )
        ).first()
        if match is not None:
            return match.id

        reference_schema = CodeReferenceSchema.from_request(
            code_reference, workspace_id=workspace_id
        )
        session.add(reference_schema)
        return reference_schema.id
alembic: Alembic property readonly

The Alembic wrapper.

Returns:

Type Description
Alembic

The Alembic wrapper.

Exceptions:

Type Description
ValueError

If the store is not initialized.

engine: Engine property readonly

The SQLAlchemy engine.

Returns:

Type Description
Engine

The SQLAlchemy engine.

Exceptions:

Type Description
ValueError

If the store is not initialized.

runs_inside_server: bool property readonly

Whether the store is running inside a server.

Returns:

Type Description
bool

Whether the store is running inside a server.

CONFIG_TYPE (StoreConfiguration) pydantic-model

SQL ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The configuration of the secrets store to use. This defaults to a SQL secrets store that extends the SQL ZenML store.

driver Optional[zenml.zen_stores.sql_zen_store.SQLDatabaseDriver]

The SQL database driver.

database Optional[str]

database name. If not already present on the server, it will be created automatically on first access.

username Optional[str]

The database username.

password Optional[str]

The database password.

ssl_ca Optional[str]

certificate authority certificate. Required for SSL enabled authentication if the CA certificate is not part of the certificates shipped by the operating system.

ssl_cert Optional[str]

client certificate. Required for SSL enabled authentication if client certificates are used.

ssl_key Optional[str]

client certificate private key. Required for SSL enabled authentication if client certificates are used.

ssl_verify_server_cert bool

set to verify the identity of the server against the provided server certificate.

pool_size int

The maximum number of connections to keep in the SQLAlchemy pool.

max_overflow int

The maximum number of connections to allow in the SQLAlchemy pool in addition to the pool_size.

pool_pre_ping bool

Enable emitting a test statement on the SQL connection at the start of each connection pool checkout, to test that the database connection is still viable.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStoreConfiguration(StoreConfiguration):
    """SQL ZenML store configuration.

    Attributes:
        type: The type of the store.
        secrets_store: The configuration of the secrets store to use.
            This defaults to a SQL secrets store that extends the SQL ZenML
            store.
        driver: The SQL database driver.
        database: database name. If not already present on the server, it will
            be created automatically on first access.
        username: The database username.
        password: The database password.
        ssl_ca: certificate authority certificate. Required for SSL
            enabled authentication if the CA certificate is not part of the
            certificates shipped by the operating system.
        ssl_cert: client certificate. Required for SSL enabled
            authentication if client certificates are used.
        ssl_key: client certificate private key. Required for SSL
            enabled authentication if client certificates are used.
        ssl_verify_server_cert: set to verify the identity of the server
            against the provided server certificate.
        pool_size: The maximum number of connections to keep in the SQLAlchemy
            pool.
        max_overflow: The maximum number of connections to allow in the
            SQLAlchemy pool in addition to the pool_size.
        pool_pre_ping: Enable emitting a test statement on the SQL connection
            at the start of each connection pool checkout, to test that the
            database connection is still viable.
    """

    type: StoreType = StoreType.SQL

    secrets_store: Optional[SecretsStoreConfiguration] = None

    driver: Optional[SQLDatabaseDriver] = None
    database: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    ssl_ca: Optional[str] = None
    ssl_cert: Optional[str] = None
    ssl_key: Optional[str] = None
    ssl_verify_server_cert: bool = False
    pool_size: int = 20
    max_overflow: int = 20
    pool_pre_ping: bool = True

    @validator("secrets_store")
    def validate_secrets_store(
        cls, secrets_store: Optional[SecretsStoreConfiguration]
    ) -> SecretsStoreConfiguration:
        """Ensures that the secrets store is initialized with a default SQL secrets store.

        Args:
            secrets_store: The secrets store config to be validated.

        Returns:
            The validated secrets store config.
        """
        if secrets_store is None:
            secrets_store = SqlSecretsStoreConfiguration()

        return secrets_store

    @root_validator(pre=True)
    def _remove_grpc_attributes(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Removes old GRPC attributes.

        Args:
            values: All model attribute values.

        Returns:
            The model attribute values
        """
        grpc_attribute_keys = [
            "grpc_metadata_host",
            "grpc_metadata_port",
            "grpc_metadata_ssl_ca",
            "grpc_metadata_ssl_key",
            "grpc_metadata_ssl_cert",
        ]
        grpc_values = [values.pop(key, None) for key in grpc_attribute_keys]
        if any(grpc_values):
            # Pass the attribute names as a lazy %-style argument; the
            # original call had a `%s` placeholder but no argument, so the
            # message was logged with a literal `%s` in it.
            logger.warning(
                "The GRPC attributes %s are unused and will be removed soon. "
                "Please remove them from SQLZenStore configuration. This will "
                "become an error in future versions of ZenML.",
                grpc_attribute_keys,
            )

        return values

    @root_validator
    def _validate_url(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the SQL URL.

        The validator also moves the MySQL username, password and database
        parameters from the URL into the other configuration arguments, if they
        are present in the URL.

        Args:
            values: The values to validate.

        Returns:
            The validated values.

        Raises:
            ValueError: If the URL is invalid or the SQL driver is not
                supported.
        """
        url = values.get("url")
        if url is None:
            return values

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        # NOTE: the ValueError messages below were originally raised with
        # printf-style extra arguments (`ValueError("msg %s", url)`), which
        # `ValueError` does not format -- the exception message ended up
        # being a tuple repr. They are now proper f-strings.
        try:
            sql_url = make_url(url)
        except ArgumentError as e:
            raise ValueError(
                f"Invalid SQL URL `{url}`: {e}. The URL must be in the format "
                "`driver://[[username:password@]hostname:port]/database["
                "?<extra-args>]`."
            ) from e

        if sql_url.drivername not in SQLDatabaseDriver.values():
            raise ValueError(
                f"Invalid SQL driver value `{url}`: The driver must be one "
                f"of: {', '.join(SQLDatabaseDriver.values())}."
            )
        values["driver"] = SQLDatabaseDriver(sql_url.drivername)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            if (
                sql_url.username
                or sql_url.password
                or sql_url.query
                or sql_url.database is None
            ):
                raise ValueError(
                    f"Invalid SQLite URL `{url}`: The URL must be in the "
                    "format `sqlite:///path/to/database.db`."
                )
            if values.get("username") or values.get("password"):
                raise ValueError(
                    "Invalid SQLite configuration: The username and password "
                    "must not be set"
                )
            values["database"] = sql_url.database
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # Credentials and database embedded in the URL are moved into
            # the dedicated configuration attributes and stripped from the
            # stored URL.
            if sql_url.username:
                values["username"] = sql_url.username
                sql_url = sql_url._replace(username=None)
            if sql_url.password:
                values["password"] = sql_url.password
                sql_url = sql_url._replace(password=None)
            if sql_url.database:
                values["database"] = sql_url.database
                sql_url = sql_url._replace(database=None)
            if sql_url.query:
                for k, v in sql_url.query.items():
                    if k == "ssl_ca":
                        values["ssl_ca"] = v
                    elif k == "ssl_cert":
                        values["ssl_cert"] = v
                    elif k == "ssl_key":
                        values["ssl_key"] = v
                    elif k == "ssl_verify_server_cert":
                        values["ssl_verify_server_cert"] = v
                    else:
                        raise ValueError(
                            f"Invalid MySQL URL query parameter `{k}`: The "
                            "parameter must be one of: ssl_ca, ssl_cert, "
                            "ssl_key, or ssl_verify_server_cert."
                        )
                sql_url = sql_url._replace(query={})

            database = values.get("database")
            if (
                not values.get("username")
                or not values.get("password")
                or not database
            ):
                raise ValueError(
                    "Invalid MySQL configuration: The username, password and "
                    "database must be set in the URL or as configuration "
                    "attributes",
                )

            regexp = r"^[^\\/?%*:|\"<>.-]{1,64}$"
            match = re.match(regexp, database)
            if not match:
                raise ValueError(
                    f"The database name does not conform to the required "
                    f"format "
                    f"rules ({regexp}): {database}"
                )

            # Save the certificates in a secure location on disk
            secret_folder = Path(
                GlobalConfiguration().local_stores_path,
                "certificates",
            )
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                content = values.get(key)
                if content and not os.path.isfile(content):
                    fileio.makedirs(str(secret_folder))
                    file_path = Path(secret_folder, f"{key}.pem")
                    with open(file_path, "w") as f:
                        f.write(content)
                    # Restrict to owner read/write: these are private keys
                    # and certificates.
                    file_path.chmod(0o600)
                    values[key] = str(file_path)

        values["url"] = str(sql_url)
        return values

    @staticmethod
    def get_local_url(path: str) -> str:
        """Get a local SQL url for a given local path.

        Args:
            path: The path to the local sqlite file.

        Returns:
            The local SQL url for the given path.
        """
        return f"sqlite:///{path}/{ZENML_SQLITE_DB_FILENAME}"

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return make_url(url).drivername in SQLDatabaseDriver.values()

    def expand_certificates(self) -> None:
        """Expands the certificates in the ssl_key, ssl_ca and ssl_cert fields.

        Each field that holds a path to an existing certificate file is
        replaced in place with the contents of that file.
        """
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            file_path = getattr(self, key, None)
            if file_path and os.path.isfile(file_path):
                with open(file_path, "r") as f:
                    setattr(self, key, f.read())

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Copy the store config using a different configuration path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, SqlZenStoreConfiguration)
        config = config.copy()

        if config.driver == SQLDatabaseDriver.MYSQL:
            # Load the certificate values back into the configuration
            config.expand_certificates()

        elif config.driver == SQLDatabaseDriver.SQLITE:
            if load_config_path:
                config.url = cls.get_local_url(str(load_config_path))
            else:
                config.url = cls.get_local_url(config_path)

        return config

    def get_sqlmodel_config(
        self,
    ) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
        """Get the SQLModel engine configuration for the SQL ZenML store.

        Returns:
            The URL and connection arguments for the SQLModel engine.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        sql_url = make_url(self.url)
        sqlalchemy_connect_args: Dict[str, Any] = {}
        engine_args = {}
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            # The following default value is needed for sqlite to avoid the
            # Error:
            #   sqlite3.ProgrammingError: SQLite objects created in a thread can
            #   only be used in that same thread.
            sqlalchemy_connect_args = {"check_same_thread": False}
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            engine_args = {
                "pool_size": self.pool_size,
                "max_overflow": self.max_overflow,
                "pool_pre_ping": self.pool_pre_ping,
            }

            sql_url = sql_url._replace(
                drivername="mysql+pymysql",
                username=self.username,
                password=self.password,
                database=self.database,
            )

            sqlalchemy_ssl_args: Dict[str, Any] = {}

            # Handle SSL params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if not os.path.isfile(ssl_setting):
                    logger.warning(
                        f"Database SSL setting `{key}` is not a file. "
                    )
                # Use a prefix slice instead of `str.lstrip`: `lstrip`
                # removes a *character set*, not a prefix, and only
                # produced the right keys ("key", "ca", "cert") by
                # accident.
                sqlalchemy_ssl_args[key[len("ssl_") :]] = ssl_setting
            if len(sqlalchemy_ssl_args) > 0:
                sqlalchemy_ssl_args[
                    "check_hostname"
                ] = self.ssl_verify_server_cert
                sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return str(sql_url), sqlalchemy_connect_args, engine_args

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/sql_zen_store.py
class Config:
    """Pydantic configuration class."""

    # Don't validate attributes when assigning them. This is necessary
    # because the certificate attributes can be expanded to the contents
    # of the certificate files (which would fail path-style validation).
    validate_assignment = False
    # Forbid extra attributes set in the class: unknown configuration
    # keys raise a validation error instead of being silently ignored.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Copy the store config using a different configuration path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Copy the store config using a different configuration path.

    Produces a copy of the store configuration suitable for loading from a
    different configuration path or environment (e.g. a container image).
    Accompanying files such as certificates are folded into the copy or
    re-pointed to the new location.

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the copied
            configuration. This can be set to a value different from
            `config_path` if the configuration copy will be loaded from
            a different environment, e.g. when the configuration is copied
            to a container image and loaded using a different absolute path.
            This will be reflected in the paths and URLs encoded in the
            copied configuration.

    Returns:
        A new store configuration object that reflects the new configuration
        path.
    """
    assert isinstance(config, SqlZenStoreConfiguration)
    config = config.copy()

    if config.driver == SQLDatabaseDriver.MYSQL:
        # Inline the certificate file contents so the copy is
        # self-contained.
        config.expand_certificates()
    elif config.driver == SQLDatabaseDriver.SQLITE:
        # Re-anchor the SQLite URL at the new configuration location.
        new_root = str(load_config_path) if load_config_path else config_path
        config.url = cls.get_local_url(new_root)

    return config
expand_certificates(self)

Expands the certificate file paths stored in the ssl_key, ssl_ca and ssl_cert fields into the contents of those files.

Source code in zenml/zen_stores/sql_zen_store.py
def expand_certificates(self) -> None:
    """Expands the certificates in the ssl_key, ssl_ca and ssl_cert fields.

    Each field that holds a path to an existing certificate file is
    replaced in place with the contents of that file; fields that are
    unset or do not point at a file are left untouched.
    """
    for attr_name in ("ssl_key", "ssl_ca", "ssl_cert"):
        cert_path = getattr(self, attr_name, None)
        if not cert_path or not os.path.isfile(cert_path):
            continue
        with open(cert_path, "r") as cert_file:
            setattr(self, attr_name, cert_file.read())
get_local_url(path) staticmethod

Get a local SQL url for a given local path.

Parameters:

Name Type Description Default
path str

The path to the local sqlite file.

required

Returns:

Type Description
str

The local SQL url for the given path.

Source code in zenml/zen_stores/sql_zen_store.py
@staticmethod
def get_local_url(path: str) -> str:
    """Get a local SQL url for a given local path.

    Args:
        path: The path to the local sqlite file.

    Returns:
        The local SQL url for the given path.
    """
    # Build the database file location, then wrap it in a sqlite URL.
    db_file = f"{path}/{ZENML_SQLITE_DB_FILENAME}"
    return f"sqlite:///{db_file}"
get_sqlmodel_config(self)

Get the SQLModel engine configuration for the SQL ZenML store.

Returns:

Type Description
Tuple[str, Dict[str, Any], Dict[str, Any]]

The URL and connection arguments for the SQLModel engine.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_sqlmodel_config(
    self,
) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
    """Get the SQLModel engine configuration for the SQL ZenML store.

    Returns:
        The URL and connection arguments for the SQLModel engine.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    sql_url = make_url(self.url)
    sqlalchemy_connect_args: Dict[str, Any] = {}
    engine_args = {}
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        assert self.database is not None
        # The following default value is needed for sqlite to avoid the
        # Error:
        #   sqlite3.ProgrammingError: SQLite objects created in a thread can
        #   only be used in that same thread.
        sqlalchemy_connect_args = {"check_same_thread": False}
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        engine_args = {
            "pool_size": self.pool_size,
            "max_overflow": self.max_overflow,
            "pool_pre_ping": self.pool_pre_ping,
        }

        sql_url = sql_url._replace(
            drivername="mysql+pymysql",
            username=self.username,
            password=self.password,
            database=self.database,
        )

        sqlalchemy_ssl_args: Dict[str, Any] = {}

        # Handle SSL params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if not os.path.isfile(ssl_setting):
                logger.warning(
                    f"Database SSL setting `{key}` is not a file. "
                )
            # Use a prefix slice instead of `str.lstrip`: `lstrip` removes
            # a *character set*, not a prefix, and only produced the right
            # keys ("key", "ca", "cert") by accident.
            sqlalchemy_ssl_args[key[len("ssl_") :]] = ssl_setting
        if len(sqlalchemy_ssl_args) > 0:
            sqlalchemy_ssl_args[
                "check_hostname"
            ] = self.ssl_verify_server_cert
            sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return str(sql_url), sqlalchemy_connect_args, engine_args
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    # A URL is supported when its driver name is one of the known SQL
    # database drivers.
    driver = make_url(url).drivername
    return driver in SQLDatabaseDriver.values()
validate_secrets_store(secrets_store) classmethod

Ensures that the secrets store is initialized with a default SQL secrets store.

Parameters:

Name Type Description Default
secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The secrets store config to be validated.

required

Returns:

Type Description
SecretsStoreConfiguration

The validated secrets store config.

Source code in zenml/zen_stores/sql_zen_store.py
@validator("secrets_store")
def validate_secrets_store(
    cls, secrets_store: Optional[SecretsStoreConfiguration]
) -> SecretsStoreConfiguration:
    """Ensures that the secrets store is initialized with a default SQL secrets store.

    Args:
        secrets_store: The secrets store config to be validated.

    Returns:
        The validated secrets store config.
    """
    if secrets_store is None:
        secrets_store = SqlSecretsStoreConfiguration()

    return secrets_store
create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Source code in zenml/zen_stores/sql_zen_store.py
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.
    """
    with Session(self.engine) as session:
        schema = ArtifactSchema.from_request(artifact)
        session.add(schema)

        # Persist any visualizations alongside the artifact itself; both
        # are flushed in the single commit below.
        for visualization in artifact.visualizations or []:
            session.add(
                ArtifactVisualizationSchema.from_model(
                    visualization=visualization,
                    artifact_id=schema.id,
                )
            )

        session.commit()
        return self._artifact_schema_to_model(schema)
create_build(self, build)

Creates a new build in a workspace.

Parameters:

Name Type Description Default
build PipelineBuildRequestModel

The build to create.

required

Returns:

Type Description
PipelineBuildResponseModel

The newly created build.

Source code in zenml/zen_stores/sql_zen_store.py
def create_build(
    self,
    build: PipelineBuildRequestModel,
) -> PipelineBuildResponseModel:
    """Creates a new build in a workspace.

    Args:
        build: The build to create.

    Returns:
        The newly created build.
    """
    with Session(self.engine) as session:
        build_schema = PipelineBuildSchema.from_request(build)
        session.add(build_schema)
        session.commit()
        # Refresh so DB-generated fields are populated on the schema.
        session.refresh(build_schema)
        return build_schema.to_model()
create_code_repository(self, code_repository)

Creates a new code repository.

Parameters:

Name Type Description Default
code_repository CodeRepositoryRequestModel

Code repository to be created.

required

Returns:

Type Description
CodeRepositoryResponseModel

The newly created code repository.

Exceptions:

Type Description
EntityExistsError

If a code repository with the given name already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_code_repository(
    self, code_repository: CodeRepositoryRequestModel
) -> CodeRepositoryResponseModel:
    """Creates a new code repository.

    Args:
        code_repository: Code repository to be created.

    Returns:
        The newly created code repository.

    Raises:
        EntityExistsError: If a code repository with the given name already
            exists.
    """
    with Session(self.engine) as session:
        # Names must be unique per workspace, so look for a clash first.
        duplicate = session.exec(
            select(CodeRepositorySchema).where(
                CodeRepositorySchema.name == code_repository.name,
                CodeRepositorySchema.workspace_id
                == code_repository.workspace,
            )
        ).first()
        if duplicate is not None:
            raise EntityExistsError(
                f"Unable to create code repository in workspace "
                f"'{code_repository.workspace}': A code repository with "
                "this name already exists."
            )

        repo_schema = CodeRepositorySchema.from_request(code_repository)
        session.add(repo_schema)
        session.commit()
        session.refresh(repo_schema)

        return repo_schema.to_model()
create_deployment(self, deployment)

Creates a new deployment in a workspace.

Parameters:

Name Type Description Default
deployment PipelineDeploymentRequestModel

The deployment to create.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The newly created deployment.

Source code in zenml/zen_stores/sql_zen_store.py
def create_deployment(
    self,
    deployment: PipelineDeploymentRequestModel,
) -> PipelineDeploymentResponseModel:
    """Creates a new deployment in a workspace.

    Args:
        deployment: The deployment to create.

    Returns:
        The newly created deployment.
    """
    with Session(self.engine) as session:
        # Deduplicate the code reference before attaching it to the
        # deployment record.
        code_ref_id = self._create_or_reuse_code_reference(
            session=session,
            workspace_id=deployment.workspace,
            code_reference=deployment.code_reference,
        )

        deployment_schema = PipelineDeploymentSchema.from_request(
            deployment, code_reference_id=code_ref_id
        )
        session.add(deployment_schema)
        session.commit()
        session.refresh(deployment_schema)

        return deployment_schema.to_model()
create_flavor(*args, **kwargs)

Creates a new stack component flavor.

Parameters:

Name Type Description Default
flavor

The stack component flavor to create.

required

Returns:

Type Description
Any

The newly created flavor.

Exceptions:

Type Description
EntityExistsError

If a flavor with the same name and type is already owned by this user in this workspace.

ValueError

In case the config_schema string exceeds the max length.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_pipeline(*args, **kwargs)

Creates a new pipeline in a workspace.

Parameters:

Name Type Description Default
pipeline

The pipeline to create.

required

Returns:

Type Description
Any

The newly created pipeline.

Exceptions:

Type Description
EntityExistsError

If an identical pipeline already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_role(*args, **kwargs)

Creates a new role.

Parameters:

Name Type Description Default
role

The role model to create.

required

Returns:

Type Description
Any

The newly created role.

Exceptions:

Type Description
EntityExistsError

If a role with the given name already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Exceptions:

Type Description
EntityExistsError

If an identical pipeline run already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.

    Raises:
        EntityExistsError: If an identical pipeline run already exists.
    """
    with Session(self.engine) as session:
        # Reject duplicate names first, then duplicate IDs, preserving
        # the error precedence callers may rely on.
        name_clash = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.name == pipeline_run.name
            )
        ).first()
        if name_clash is not None:
            raise EntityExistsError(
                f"Unable to create pipeline run: A pipeline run with name "
                f"'{pipeline_run.name}' already exists."
            )

        id_clash = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.id == pipeline_run.id
            )
        ).first()
        if id_clash is not None:
            raise EntityExistsError(
                f"Unable to create pipeline run: A pipeline run with ID "
                f"'{pipeline_run.id}' already exists."
            )

        # Verify the referenced stack still exists; otherwise warn and
        # create the run without a linked stack.
        stack_id = None
        if pipeline_run.stack is not None:
            stack_id = session.exec(
                select(StackSchema.id).where(
                    StackSchema.id == pipeline_run.stack
                )
            ).first()
            if stack_id is None:
                logger.warning(
                    f"No stack found for this run. "
                    f"Creating pipeline run '{pipeline_run.name}' without "
                    "linked stack."
                )

        # Verify the referenced pipeline still exists; otherwise warn and
        # create the run as unlisted.
        pipeline_id = None
        if pipeline_run.pipeline is not None:
            pipeline_id = session.exec(
                select(PipelineSchema.id).where(
                    PipelineSchema.id == pipeline_run.pipeline
                )
            ).first()
            if pipeline_id is None:
                logger.warning(
                    f"No pipeline found. Creating pipeline run "
                    f"'{pipeline_run.name}' as unlisted run."
                )

        run_schema = PipelineRunSchema.from_request(pipeline_run)
        session.add(run_schema)
        session.commit()

        return self._run_schema_to_model(run_schema)
create_run_metadata(self, run_metadata)

Creates run metadata.

Parameters:

Name Type Description Default
run_metadata RunMetadataRequestModel

The run metadata to create.

required

Returns:

Type Description
RunMetadataResponseModel

The created run metadata.

Source code in zenml/zen_stores/sql_zen_store.py
def create_run_metadata(
    self, run_metadata: RunMetadataRequestModel
) -> RunMetadataResponseModel:
    """Creates run metadata.

    Args:
        run_metadata: The run metadata to create.

    Returns:
        The created run metadata.
    """
    with Session(self.engine) as session:
        metadata_schema = RunMetadataSchema.from_request(run_metadata)
        session.add(metadata_schema)
        session.commit()
        return metadata_schema.to_model()
create_run_step(self, step_run)

Creates a step run.

Parameters:

Name Type Description Default
step_run StepRunRequestModel

The step run to create.

required

Returns:

Type Description
StepRunResponseModel

The created step run.

Exceptions:

Type Description
EntityExistsError

if the step run already exists.

KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def create_run_step(
    self, step_run: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step run.

    Args:
        step_run: The step run to create.

    Returns:
        The created step run.

    Raises:
        EntityExistsError: if the step run already exists.
        KeyError: if the pipeline run doesn't exist.
    """
    with Session(self.engine) as session:
        # Check if the pipeline run exists
        run = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.id == step_run.pipeline_run_id
            )
        ).first()
        if run is None:
            raise KeyError(
                f"Unable to create step '{step_run.name}': No pipeline run "
                f"with ID '{step_run.pipeline_run_id}' found."
            )

        # Check if the step name already exists in the pipeline run
        # (step names are unique per run, not globally).
        existing_step_run = session.exec(
            select(StepRunSchema)
            .where(StepRunSchema.name == step_run.name)
            .where(
                StepRunSchema.pipeline_run_id == step_run.pipeline_run_id
            )
        ).first()
        if existing_step_run is not None:
            raise EntityExistsError(
                f"Unable to create step '{step_run.name}': A step with this "
                f"name already exists in the pipeline run with ID "
                f"'{step_run.pipeline_run_id}'."
            )

        # Create the step
        step_schema = StepRunSchema.from_request(step_run)
        session.add(step_schema)
        # NOTE(review): step_schema.id is read below before commit() —
        # presumably the primary key is generated client-side by the
        # schema class; confirm against StepRunSchema's definition.

        # Add logs entry for the step if exists
        if step_run.logs is not None:
            log_entry = LogsSchema(
                uri=step_run.logs.uri,
                step_run_id=step_schema.id,
                artifact_store_id=step_run.logs.artifact_store_id,
            )
            session.add(log_entry)

        # Save parent step IDs into the database.
        for parent_step_id in step_run.parent_step_ids:
            self._set_run_step_parent_step(
                child_id=step_schema.id,
                parent_id=parent_step_id,
                session=session,
            )

        # Save input artifact IDs into the database.
        for input_name, artifact_id in step_run.inputs.items():
            self._set_run_step_input_artifact(
                run_step_id=step_schema.id,
                artifact_id=artifact_id,
                name=input_name,
                session=session,
            )

        # Save output artifact IDs into the database.
        for output_name, artifact_id in step_run.outputs.items():
            self._set_run_step_output_artifact(
                step_run_id=step_schema.id,
                artifact_id=artifact_id,
                name=output_name,
                session=session,
            )

        # Single commit: the step, its logs, and all link rows are
        # persisted atomically.
        session.commit()

        return self._run_step_schema_to_model(step_schema)
create_schedule(self, schedule)

Creates a new schedule.

Parameters:

Name Type Description Default
schedule ScheduleRequestModel

The schedule to create.

required

Returns:

Type Description
ScheduleResponseModel

The newly created schedule.

Source code in zenml/zen_stores/sql_zen_store.py
def create_schedule(
    self, schedule: ScheduleRequestModel
) -> ScheduleResponseModel:
    """Creates a new schedule.

    Args:
        schedule: The schedule to create.

    Returns:
        The newly created schedule.
    """
    with Session(self.engine) as session:
        schedule_schema = ScheduleSchema.from_create_model(model=schedule)
        session.add(schedule_schema)
        session.commit()
        return schedule_schema.to_model()
create_service_connector(*args, **kwargs)

Creates a new service connector.

Parameters:

Name Type Description Default
service_connector

Service connector to be created.

required

Returns:

Type Description
Any

The newly created service connector.

Exceptions:

Type Description
Exception

If anything goes wrong during the creation of the service connector.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_stack(*args, **kwargs)

Register a new stack.

Parameters:

Name Type Description Default
stack

The stack to register.

required

Returns:

Type Description
Any

The registered stack.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_stack_component(*args, **kwargs)

Create a stack component.

Parameters:

Name Type Description Default
component

The stack component to create.

required

Returns:

Type Description
Any

The created stack component.

Exceptions:

Type Description
KeyError

if the stack component references a non-existent connector.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_team(*args, **kwargs)

Creates a new team.

Parameters:

Name Type Description Default
team

The team model to create.

required

Returns:

Type Description
Any

The newly created team.

Exceptions:

Type Description
EntityExistsError

If a team with the given name already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_team_role_assignment(self, team_role_assignment)

Creates a new team role assignment.

Parameters:

Name Type Description Default
team_role_assignment TeamRoleAssignmentRequestModel

The role assignment model to create.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The newly created role assignment.

Exceptions:

Type Description
EntityExistsError

If the role assignment already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_team_role_assignment(
    self, team_role_assignment: TeamRoleAssignmentRequestModel
) -> TeamRoleAssignmentResponseModel:
    """Creates a new team role assignment.

    Args:
        team_role_assignment: The role assignment model to create.

    Returns:
        The newly created role assignment.

    Raises:
        EntityExistsError: If the role assignment already exists.
    """
    with Session(self.engine) as session:
        role = self._get_role_schema(
            team_role_assignment.role, session=session
        )
        workspace: Optional[WorkspaceSchema] = None
        if team_role_assignment.workspace:
            workspace = self._get_workspace_schema(
                team_role_assignment.workspace, session=session
            )
        team = self._get_team_schema(
            team_role_assignment.team, session=session
        )
        # BUGFIX: the duplicate check previously queried
        # UserRoleAssignmentSchema (comparing user_id against a team ID),
        # so an existing team assignment was never detected. Query the
        # team role assignment table instead, mirroring
        # create_user_role_assignment.
        query = select(TeamRoleAssignmentSchema).where(
            TeamRoleAssignmentSchema.team_id == team.id,
            TeamRoleAssignmentSchema.role_id == role.id,
        )
        if workspace is not None:
            query = query.where(
                TeamRoleAssignmentSchema.workspace_id == workspace.id
            )
        existing_role_assignment = session.exec(query).first()
        if existing_role_assignment is not None:
            raise EntityExistsError(
                f"Unable to assign role '{role.name}' to team "
                f"'{team.name}': Role already assigned in this workspace."
            )
        role_assignment = TeamRoleAssignmentSchema(
            role_id=role.id,
            team_id=team.id,
            workspace_id=workspace.id if workspace else None,
            role=role,
            team=team,
            workspace=workspace,
        )
        session.add(role_assignment)
        session.commit()
        return role_assignment.to_model()
create_user(*args, **kwargs)

Creates a new user.

Parameters:

Name Type Description Default
user

User to be created.

required

Returns:

Type Description
Any

The newly created user.

Exceptions:

Type Description
EntityExistsError

If a user with the given name already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
create_user_role_assignment(self, user_role_assignment)

Assigns a role to a user or team, scoped to a specific workspace.

Parameters:

Name Type Description Default
user_role_assignment UserRoleAssignmentRequestModel

The role assignment to create.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The created role assignment.

Exceptions:

Type Description
EntityExistsError

if the role assignment already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_user_role_assignment(
    self, user_role_assignment: UserRoleAssignmentRequestModel
) -> UserRoleAssignmentResponseModel:
    """Assigns a role to a user or team, scoped to a specific workspace.

    Args:
        user_role_assignment: The role assignment to create.

    Returns:
        The created role assignment.

    Raises:
        EntityExistsError: if the role assignment already exists.
    """
    with Session(self.engine) as session:
        role = self._get_role_schema(
            user_role_assignment.role, session=session
        )
        workspace: Optional[WorkspaceSchema] = None
        if user_role_assignment.workspace:
            workspace = self._get_workspace_schema(
                user_role_assignment.workspace, session=session
            )
        user = self._get_user_schema(
            user_role_assignment.user, session=session
        )
        # Reject duplicates: same user + role (optionally scoped to the
        # same workspace).
        assignment_query = select(UserRoleAssignmentSchema).where(
            UserRoleAssignmentSchema.user_id == user.id,
            UserRoleAssignmentSchema.role_id == role.id,
        )
        if workspace is not None:
            assignment_query = assignment_query.where(
                UserRoleAssignmentSchema.workspace_id == workspace.id
            )
        duplicate = session.exec(assignment_query).first()
        if duplicate is not None:
            raise EntityExistsError(
                f"Unable to assign role '{role.name}' to user "
                f"'{user.name}': Role already assigned in this workspace."
            )
        new_assignment = UserRoleAssignmentSchema(
            role_id=role.id,
            user_id=user.id,
            workspace_id=workspace.id if workspace else None,
            role=role,
            user=user,
            workspace=workspace,
        )
        session.add(new_assignment)
        session.commit()
        return new_assignment.to_model()
create_workspace(*args, **kwargs)

Creates a new workspace.

Parameters:

Name Type Description Default
workspace

The workspace to create.

required

Returns:

Type Description
Any

The newly created workspace.

Exceptions:

Type Description
EntityExistsError

If a workspace with the given name already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort metadata capture: analytics failures must never
        # break the wrapped call, hence the broad guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when available.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_artifact(self, artifact_id)

Deletes an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to delete.

required

Exceptions:

Type Description
KeyError

if the artifact doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_artifact(self, artifact_id: UUID) -> None:
    """Deletes an artifact.

    Args:
        artifact_id: The ID of the artifact to delete.

    Raises:
        KeyError: if the artifact doesn't exist.
    """
    with Session(self.engine) as session:
        artifact_schema = session.exec(
            select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
        ).first()
        if artifact_schema is None:
            raise KeyError(
                f"Unable to delete artifact with ID {artifact_id}: "
                f"No artifact with this ID found."
            )
        session.delete(artifact_schema)
        session.commit()
delete_build(self, build_id)

Deletes a build.

Parameters:

Name Type Description Default
build_id UUID

The ID of the build to delete.

required

Exceptions:

Type Description
KeyError

if the build doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_build(self, build_id: UUID) -> None:
    """Deletes a build.

    Args:
        build_id: The ID of the build to delete.

    Raises:
        KeyError: if the build doesn't exist.
    """
    with Session(self.engine) as session:
        # Fetch the build row; deleting a non-existent build is an error.
        query = select(PipelineBuildSchema).where(
            PipelineBuildSchema.id == build_id
        )
        build_schema = session.exec(query).first()
        if build_schema is None:
            raise KeyError(
                f"Unable to delete build with ID {build_id}: "
                f"No build with this ID found."
            )

        session.delete(build_schema)
        session.commit()
delete_code_repository(self, code_repository_id)

Deletes a code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to delete.

required

Exceptions:

Type Description
KeyError

If no code repository with the given ID exists.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_code_repository(self, code_repository_id: UUID) -> None:
    """Deletes a code repository.

    Args:
        code_repository_id: The ID of the code repository to delete.

    Raises:
        KeyError: If no code repository with the given ID exists.
    """
    with Session(self.engine) as session:
        # Resolve the repository row before attempting the delete.
        query = select(CodeRepositorySchema).where(
            CodeRepositorySchema.id == code_repository_id
        )
        repo_schema = session.exec(query).first()
        if repo_schema is None:
            raise KeyError(
                f"Unable to delete code repository with ID "
                f"{code_repository_id}: No code repository with this ID "
                "found."
            )

        session.delete(repo_schema)
        session.commit()
delete_deployment(self, deployment_id)

Deletes a deployment.

Parameters:

Name Type Description Default
deployment_id UUID

The ID of the deployment to delete.

required

Exceptions:

Type Description
KeyError

If the deployment doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_deployment(self, deployment_id: UUID) -> None:
    """Deletes a deployment.

    Args:
        deployment_id: The ID of the deployment to delete.

    Raises:
        KeyError: If the deployment doesn't exist.
    """
    with Session(self.engine) as session:
        # Verify that a deployment with the given ID exists.
        query = select(PipelineDeploymentSchema).where(
            PipelineDeploymentSchema.id == deployment_id
        )
        deployment_schema = session.exec(query).first()
        if deployment_schema is None:
            raise KeyError(
                f"Unable to delete deployment with ID {deployment_id}: "
                f"No deployment with this ID found."
            )

        session.delete(deployment_schema)
        session.commit()
delete_flavor(*args, **kwargs)

Delete a flavor.

Parameters:

Name Type Description Default
flavor_id

The id of the flavor to delete.

required

Exceptions:

Type Description
KeyError

if the flavor doesn't exist.

IllegalOperationError

if the flavor is used by a stack component.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_pipeline(*args, **kwargs)

Deletes a pipeline.

Parameters:

Name Type Description Default
pipeline_id

The ID of the pipeline to delete.

required

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_role(*args, **kwargs)

Deletes a role.

Parameters:

Name Type Description Default
role_name_or_id

Name or ID of the role to delete.

required

Exceptions:

Type Description
IllegalOperationError

If the role is still assigned to users or the role is one of the built-in roles.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_run(self, run_id)

Deletes a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to delete.

required

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_run(self, run_id: UUID) -> None:
    """Deletes a pipeline run.

    Args:
        run_id: The ID of the pipeline run to delete.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
    with Session(self.engine) as session:
        # Resolve the pipeline run row before attempting the delete.
        query = select(PipelineRunSchema).where(
            PipelineRunSchema.id == run_id
        )
        run_schema = session.exec(query).first()
        if run_schema is None:
            raise KeyError(
                f"Unable to delete pipeline run with ID {run_id}: "
                f"No pipeline run with this ID found."
            )

        # Remove the run and persist the change.
        session.delete(run_schema)
        session.commit()
delete_schedule(self, schedule_id)

Deletes a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to delete.

required

Exceptions:

Type Description
KeyError

if the schedule doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_schedule(self, schedule_id: UUID) -> None:
    """Deletes a schedule.

    Args:
        schedule_id: The ID of the schedule to delete.

    Raises:
        KeyError: if the schedule doesn't exist.
    """
    with Session(self.engine) as session:
        # Resolve the schedule row before attempting the delete.
        query = select(ScheduleSchema).where(
            ScheduleSchema.id == schedule_id
        )
        schedule_schema = session.exec(query).first()
        if schedule_schema is None:
            raise KeyError(
                f"Unable to delete schedule with ID {schedule_id}: "
                f"No schedule with this ID found."
            )

        # Remove the schedule and persist the change.
        session.delete(schedule_schema)
        session.commit()
delete_service_connector(self, service_connector_id)

Deletes a service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to delete.

required

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

IllegalOperationError

If the service connector is still referenced by one or more stack components.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_service_connector(self, service_connector_id: UUID) -> None:
    """Deletes a service connector.

    Args:
        service_connector_id: The ID of the service connector to delete.

    Raises:
        KeyError: If no service connector with the given ID exists.
        IllegalOperationError: If the service connector is still referenced
            by one or more stack components.
    """
    with Session(self.engine) as session:
        # Keep the try narrow: `.one()` raises `NoResultFound` when no row
        # matches (it never returns None, so a None-check would be dead
        # code). Translate that into the KeyError documented above,
        # carrying a useful message instead of an empty exception.
        try:
            service_connector = session.exec(
                select(ServiceConnectorSchema).where(
                    ServiceConnectorSchema.id == service_connector_id
                )
            ).one()
        except NoResultFound as error:
            raise KeyError(
                f"Service connector with ID {service_connector_id} not "
                "found."
            ) from error

        # Refuse to delete a connector that stack components still use.
        if len(service_connector.components) > 0:
            raise IllegalOperationError(
                f"Service connector with ID {service_connector_id} "
                f"cannot be deleted as it is still referenced by "
                f"{len(service_connector.components)} "
                "stack components. Before deleting this service "
                "connector, make sure to remove it from all stack "
                "components."
            )

        session.delete(service_connector)

        # Also clean up the secret holding the connector's credentials.
        if service_connector.secret_id and self.secrets_store:
            try:
                self.secrets_store.delete_secret(
                    service_connector.secret_id
                )
            except KeyError:
                # If the secret doesn't exist anymore, we can ignore
                # this error
                pass

        session.commit()
delete_stack(*args, **kwargs)

Delete a stack.

Parameters:

Name Type Description Default
stack_id

The ID of the stack to delete.

required

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

IllegalOperationError

if the stack is a default stack.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_stack_component(*args, **kwargs)

Delete a stack component.

Parameters:

Name Type Description Default
component_id

The id of the stack component to delete.

required

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

IllegalOperationError

if the stack component is part of one or more stacks, or if it's a default stack component.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_team(*args, **kwargs)

Deletes a team.

Parameters:

Name Type Description Default
team_name_or_id

Name or ID of the team to delete.

required
Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_team_role_assignment(self, team_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

The ID of the specific role assignment

required

Exceptions:

Type Description
KeyError

If the role assignment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> None:
    """Delete a specific role assignment.

    Args:
        team_role_assignment_id: The ID of the specific role assignment

    Raises:
        KeyError: If the role assignment does not exist.
    """
    with Session(self.engine) as session:
        # `one_or_none` yields the row or None; never multiple rows here.
        query = select(TeamRoleAssignmentSchema).where(
            TeamRoleAssignmentSchema.id == team_role_assignment_id
        )
        assignment = session.exec(query).one_or_none()
        if assignment is None:
            raise KeyError(
                f"No team role assignment with id "
                f"{team_role_assignment_id} exists."
            )

        session.delete(assignment)
        session.commit()
delete_user(*args, **kwargs)

Deletes a user.

Parameters:

Name Type Description Default
user_name_or_id

The name or the ID of the user to delete.

required

Exceptions:

Type Description
IllegalOperationError

If the user is the default user account.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
delete_user_role_assignment(self, user_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

The ID of the specific role assignment.

required

Exceptions:

Type Description
KeyError

If the role assignment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> None:
    """Delete a specific role assignment.

    Args:
        user_role_assignment_id: The ID of the specific role assignment.

    Raises:
        KeyError: If the role assignment does not exist.
    """
    with Session(self.engine) as session:
        # `one_or_none` yields the row or None; never multiple rows here.
        query = select(UserRoleAssignmentSchema).where(
            UserRoleAssignmentSchema.id == user_role_assignment_id
        )
        assignment = session.exec(query).one_or_none()
        if assignment is None:
            raise KeyError(
                f"No user role assignment with id "
                f"{user_role_assignment_id} exists."
            )

        session.delete(assignment)
        session.commit()
delete_workspace(*args, **kwargs)

Deletes a workspace.

Parameters:

Name Type Description Default
workspace_name_or_id

Name or ID of the workspace to delete.

required

Exceptions:

Type Description
IllegalOperationError

If the workspace is the default workspace.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while recording an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Analytics collection is strictly best-effort: any failure here
        # is logged and must never break the wrapped call.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            # Attach metadata from the first trackable argument, if any.
            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # A trackable result supersedes any argument-derived metadata.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
filter_and_paginate(session, query, table, filter_model, custom_schema_to_model_conversion=None, custom_fetch=None) classmethod

Given a query, return a Page instance with a list of filtered Models.

Parameters:

Name Type Description Default
session Session

The SQLModel Session

required
query Union[sqlmodel.sql.expression.Select, sqlmodel.sql.expression.SelectOfScalar]

The query to execute

required
table Type[~AnySchema]

The table to select from

required
filter_model BaseFilterModel

The filter to use, including pagination and sorting

required
custom_schema_to_model_conversion Optional[Callable[[~AnySchema], ~B]]

Callable to convert the schema into a model. This is used if the Model contains additional data that is not explicitly stored as a field or relationship on the model.

None
custom_fetch Optional[Callable[[sqlmodel.orm.session.Session, Union[sqlmodel.sql.expression.Select, sqlmodel.sql.expression.SelectOfScalar], zenml.models.filter_models.BaseFilterModel], List[~AnySchema]]]

Custom callable to use to fetch items from the database for a given query. This is used if the items fetched from the database need to be processed differently (e.g. to perform additional filtering). The callable should take a Session, a Select query and a BaseFilterModel filter as arguments and return a List of items.

None

Returns:

Type Description
Page[B]

The Domain Model representation of the DB resource

Exceptions:

Type Description
ValueError

if the filtered page number is out of bounds.

RuntimeError

if the schema does not have a to_model method.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def filter_and_paginate(
    cls,
    session: Session,
    query: Union[Select[AnySchema], SelectOfScalar[AnySchema]],
    table: Type[AnySchema],
    filter_model: BaseFilterModel,
    custom_schema_to_model_conversion: Optional[
        Callable[[AnySchema], B]
    ] = None,
    custom_fetch: Optional[
        Callable[
            [
                Session,
                Union[Select[AnySchema], SelectOfScalar[AnySchema]],
                BaseFilterModel,
            ],
            List[AnySchema],
        ]
    ] = None,
) -> Page[B]:
    """Given a query, return a Page instance with a list of filtered Models.

    Args:
        session: The SQLModel Session
        query: The query to execute
        table: The table to select from
        filter_model: The filter to use, including pagination and sorting
        custom_schema_to_model_conversion: Callable to convert the schema
            into a model. This is used if the Model contains additional
            data that is not explicitly stored as a field or relationship
            on the model.
        custom_fetch: Custom callable to use to fetch items from the
            database for a given query. This is used if the items fetched
            from the database need to be processed differently (e.g. to
            perform additional filtering). The callable should take a
            `Session`, a `Select` query and a `BaseFilterModel` filter as
            arguments and return a `List` of items.

    Returns:
        The Domain Model representation of the DB resource

    Raises:
        ValueError: if the filtered page number is out of bounds.
        RuntimeError: if the schema does not have a `to_model` method.
    """
    # Narrow the base query down to the rows matching the filter.
    query = filter_model.apply_filter(query=query, table=table)

    # Get the total amount of items in the database for a given query
    if custom_fetch:
        # NOTE(review): with a custom fetch the full result set is
        # materialized just to count it — assumed acceptable for the
        # result sizes involved; confirm if these can grow large.
        total = len(custom_fetch(session, query, filter_model))
    else:
        # Count via a subquery; `noload("*")` disables relationship
        # loading since only the row count is needed here.
        total = session.scalar(
            select([func.count("*")]).select_from(
                query.options(noload("*")).subquery()
            )
        )

    # Sorting
    # The filter model supplies both the sort column name and direction.
    column, operand = filter_model.sorting_params
    if operand == SorterOps.DESCENDING:
        query = query.order_by(desc(getattr(table, column)))
    else:
        query = query.order_by(asc(getattr(table, column)))

    # Get the total amount of pages in the database for a given query
    if total == 0:
        # An empty result still counts as one (empty) page.
        total_pages = 1
    else:
        total_pages = math.ceil(total / filter_model.size)

    if filter_model.page > total_pages:
        raise ValueError(
            f"Invalid page {filter_model.page}. The requested page size is "
            f"{filter_model.size} and there are a total of {total} items "
            f"for this query. The maximum page value therefore is "
            f"{total_pages}."
        )

    # Get a page of the actual data
    item_schemas: List[AnySchema]
    if custom_fetch:
        item_schemas = custom_fetch(session, query, filter_model)
        # select the items in the current page
        item_schemas = item_schemas[
            filter_model.offset : filter_model.offset + filter_model.size
        ]
    else:
        # Page in SQL via LIMIT/OFFSET; `.unique()` collapses duplicate
        # rows produced by eager-loaded joined relationships.
        item_schemas = (
            session.exec(
                query.limit(filter_model.size).offset(filter_model.offset)
            )
            .unique()
            .all()
        )

    # Convert this page of items from schemas to models.
    items: List[B] = []
    for schema in item_schemas:
        # If a custom conversion function is provided, use it.
        if custom_schema_to_model_conversion:
            items.append(custom_schema_to_model_conversion(schema))
            continue
        # Otherwise, try to use the `to_model` method of the schema.
        to_model = getattr(schema, "to_model", None)
        if callable(to_model):
            items.append(to_model())
            continue
        # If neither of the above work, raise an error.
        raise RuntimeError(
            f"Cannot convert schema `{schema.__class__.__name__}` to model "
            "since it does not have a `to_model` method."
        )

    return Page(
        total=total,
        total_pages=total_pages,
        items=items,
        index=filter_model.page,
        max_size=filter_model.size,
    )
get_artifact(self, artifact_id)

Gets an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to get.

required

Returns:

Type Description
ArtifactResponseModel

The artifact.

Exceptions:

Type Description
KeyError

if the artifact doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
    """Gets an artifact.

    Args:
        artifact_id: The ID of the artifact to get.

    Returns:
        The artifact.

    Raises:
        KeyError: if the artifact doesn't exist.
    """
    with Session(self.engine) as session:
        query = select(ArtifactSchema).where(
            ArtifactSchema.id == artifact_id
        )
        artifact_schema = session.exec(query).first()
        if artifact_schema is None:
            raise KeyError(
                f"Unable to get artifact with ID {artifact_id}: "
                f"No artifact with this ID found."
            )
        # Conversion is delegated to a helper rather than `to_model`.
        return self._artifact_schema_to_model(artifact_schema)
get_auth_user(self, user_name_or_id)

Gets the auth model to a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserAuthModel

The requested user, if it was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_auth_user(
    self, user_name_or_id: Union[str, UUID]
) -> UserAuthModel:
    """Gets the auth model to a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
    with Session(self.engine) as session:
        user_schema = self._get_user_schema(
            user_name_or_id, session=session
        )
        # Copy exactly the fields the auth model needs off the schema.
        auth_fields = {
            name: getattr(user_schema, name)
            for name in (
                "id",
                "name",
                "full_name",
                "email_opted_in",
                "active",
                "created",
                "updated",
                "password",
                "activation_token",
            )
        }
        return UserAuthModel(**auth_fields)
get_build(self, build_id)

Get a build with a given ID.

Parameters:

Name Type Description Default
build_id UUID

ID of the build.

required

Returns:

Type Description
PipelineBuildResponseModel

The build.

Exceptions:

Type Description
KeyError

If the build does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
    """Get a build with a given ID.

    Args:
        build_id: ID of the build.

    Returns:
        The build.

    Raises:
        KeyError: If the build does not exist.
    """
    with Session(self.engine) as session:
        # Resolve the build row; a missing ID is reported as KeyError.
        query = select(PipelineBuildSchema).where(
            PipelineBuildSchema.id == build_id
        )
        build_schema = session.exec(query).first()
        if build_schema is None:
            raise KeyError(
                f"Unable to get build with ID '{build_id}': "
                "No build with this ID found."
            )

        return build_schema.to_model()
get_code_repository(self, code_repository_id)

Gets a specific code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to get.

required

Returns:

Type Description
CodeRepositoryResponseModel

The requested code repository, if it was found.

Exceptions:

Type Description
KeyError

If no code repository with the given ID exists.

Source code in zenml/zen_stores/sql_zen_store.py
def get_code_repository(
    self, code_repository_id: UUID
) -> CodeRepositoryResponseModel:
    """Gets a specific code repository.

    Args:
        code_repository_id: The ID of the code repository to get.

    Returns:
        The requested code repository, if it was found.

    Raises:
        KeyError: If no code repository with the given ID exists.
    """
    with Session(self.engine) as session:
        query = select(CodeRepositorySchema).where(
            CodeRepositorySchema.id == code_repository_id
        )
        repo_schema = session.exec(query).first()
        if repo_schema is None:
            raise KeyError(
                f"Unable to get code repository with ID "
                f"'{code_repository_id}': No code repository with this "
                "ID found."
            )

        return repo_schema.to_model()
get_deployment(self, deployment_id)

Get a deployment with a given ID.

Parameters:

Name Type Description Default
deployment_id UUID

ID of the deployment.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The deployment.

Exceptions:

Type Description
KeyError

If the deployment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_deployment(
    self, deployment_id: UUID
) -> PipelineDeploymentResponseModel:
    """Fetch a single pipeline deployment by its ID.

    Args:
        deployment_id: ID of the deployment.

    Returns:
        The deployment.

    Raises:
        KeyError: If the deployment does not exist.
    """
    with Session(self.engine) as session:
        deployment_schema = session.exec(
            select(PipelineDeploymentSchema).where(
                PipelineDeploymentSchema.id == deployment_id
            )
        ).first()

        if deployment_schema is not None:
            return deployment_schema.to_model()

        raise KeyError(
            f"Unable to get deployment with ID '{deployment_id}': "
            "No deployment with this ID found."
        )
get_flavor(self, flavor_id)

Get a flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the flavor to fetch.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Fetch a stack component flavor by its ID.

    Args:
        flavor_id: The ID of the flavor to fetch.

    Returns:
        The stack component flavor.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
    with Session(self.engine) as session:
        flavor_schema = session.exec(
            select(FlavorSchema).where(FlavorSchema.id == flavor_id)
        ).first()

        if flavor_schema is not None:
            return flavor_schema.to_model()

        raise KeyError(f"Flavor with ID {flavor_id} not found.")
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
Tuple[zenml.models.pipeline_run_models.PipelineRunResponseModel, bool]

The pipeline run, and a boolean indicating whether the run was created or not.

Source code in zenml/zen_stores/sql_zen_store.py
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> Tuple[PipelineRunResponseModel, bool]:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run, and a boolean indicating whether the run was
        created or not.
    """
    # We want to have the 'create' statement in the try block since running
    # it first will reduce concurrency issues.
    try:
        return self.create_run(pipeline_run), True
    except (EntityExistsError, IntegrityError):
        # Catch both `EntityExistsError` and `IntegrityError` exceptions
        # since either one can be raised by the database when trying
        # to create a new pipeline run with duplicate ID or name.
        try:
            # Prefer looking up the existing run by ID; if the conflict
            # was on the name rather than the ID, fall back to a
            # lookup by name.
            return self.get_run(pipeline_run.id), False
        except KeyError:
            return self.get_run(pipeline_run.name), False
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Exceptions:

Type Description
KeyError

if the pipeline does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Fetch a pipeline by its ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.

    Raises:
        KeyError: if the pipeline does not exist.
    """
    with Session(self.engine) as session:
        pipeline_schema = session.exec(
            select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
        ).first()

        if pipeline_schema is not None:
            return pipeline_schema.to_model()

        raise KeyError(
            f"Unable to get pipeline with ID '{pipeline_id}': "
            "No pipeline with this ID found."
        )
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Source code in zenml/zen_stores/sql_zen_store.py
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Look up a role by its name or ID.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.
    """
    with Session(self.engine) as session:
        role_schema = self._get_role_schema(
            role_name_or_id, session=session
        )
        return role_schema.to_model()
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[str, uuid.UUID]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/sql_zen_store.py
def get_run(
    self, run_name_or_id: Union[str, UUID]
) -> PipelineRunResponseModel:
    """Look up a pipeline run by its name or ID.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.
    """
    with Session(self.engine) as session:
        run_schema = self._get_run_schema(run_name_or_id, session=session)
        return self._run_schema_to_model(run_schema)
get_run_step(self, step_run_id)

Get a step run by ID.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step run to get.

required

Returns:

Type Description
StepRunResponseModel

The step run.

Exceptions:

Type Description
KeyError

if the step run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
    """Fetch a step run by its ID.

    Args:
        step_run_id: The ID of the step run to get.

    Returns:
        The step run.

    Raises:
        KeyError: if the step run doesn't exist.
    """
    with Session(self.engine) as session:
        step_schema = session.exec(
            select(StepRunSchema).where(StepRunSchema.id == step_run_id)
        ).first()

        if step_schema is not None:
            return self._run_step_schema_to_model(step_schema)

        raise KeyError(
            f"Unable to get step run with ID {step_run_id}: No step "
            "run with this ID found."
        )
get_schedule(self, schedule_id)

Get a schedule with a given ID.

Parameters:

Name Type Description Default
schedule_id UUID

ID of the schedule.

required

Returns:

Type Description
ScheduleResponseModel

The schedule.

Exceptions:

Type Description
KeyError

if the schedule does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
    """Fetch a schedule by its ID.

    Args:
        schedule_id: ID of the schedule.

    Returns:
        The schedule.

    Raises:
        KeyError: if the schedule does not exist.
    """
    with Session(self.engine) as session:
        schedule_schema = session.exec(
            select(ScheduleSchema).where(ScheduleSchema.id == schedule_id)
        ).first()

        if schedule_schema is not None:
            return schedule_schema.to_model()

        raise KeyError(
            f"Unable to get schedule with ID '{schedule_id}': "
            "No schedule with this ID found."
        )
get_service_connector(self, service_connector_id)

Gets a specific service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to get.

required

Returns:

Type Description
ServiceConnectorResponseModel

The requested service connector, if it was found.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

Source code in zenml/zen_stores/sql_zen_store.py
def get_service_connector(
    self, service_connector_id: UUID
) -> ServiceConnectorResponseModel:
    """Fetch a service connector by its ID.

    Args:
        service_connector_id: The ID of the service connector to get.

    Returns:
        The requested service connector, if it was found.

    Raises:
        KeyError: If no service connector with the given ID exists.
    """
    with Session(self.engine) as session:
        connector_schema = session.exec(
            select(ServiceConnectorSchema).where(
                ServiceConnectorSchema.id == service_connector_id
            )
        ).first()

        if connector_schema is None:
            raise KeyError(
                f"Service connector with ID {service_connector_id} not "
                "found."
            )

        # Resolve the connector type details before handing the model
        # back to the caller.
        connector_model = connector_schema.to_model()
        self._populate_connector_type(connector_model)
        return connector_model
get_service_connector_client(self, service_connector_id, resource_type=None, resource_id=None)

Get a service connector client for a service connector and given resource.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the base service connector to use.

required
resource_type Optional[str]

The type of resource to get a client for.

None
resource_id Optional[str]

The ID of the resource to get a client for.

None

Returns:

Type Description
ServiceConnectorResponseModel

A service connector client that can be used to access the given resource.

Source code in zenml/zen_stores/sql_zen_store.py
def get_service_connector_client(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> ServiceConnectorResponseModel:
    """Get a service connector client for a service connector and given resource.

    Args:
        service_connector_id: The ID of the base service connector to use.
        resource_type: The type of resource to get a client for.
        resource_id: The ID of the resource to get a client for.

    Returns:
        A service connector client that can be used to access the given
        resource.
    """
    base_connector = self.get_service_connector(service_connector_id)

    # Instantiate the base connector and derive from it a client scoped
    # to the requested resource.
    connector_instance = service_connector_registry.instantiate_connector(
        model=base_connector
    )
    connector_client = connector_instance.get_connector_client(
        resource_type=resource_type,
        resource_id=resource_id,
    )

    # Convert the client back into a response model, carrying over the
    # metadata of the base connector.
    client_model = connector_client.to_response_model(
        user=base_connector.user,
        workspace=base_connector.workspace,
        is_shared=base_connector.is_shared,
        description=base_connector.description,
        labels=base_connector.labels,
    )
    self._populate_connector_type(client_model)

    return client_model
get_service_connector_type(self, connector_type)

Returns the requested service connector type.

Parameters:

Name Type Description Default
connector_type str

the service connector type identifier.

required

Returns:

Type Description
ServiceConnectorTypeModel

The requested service connector type.

Source code in zenml/zen_stores/sql_zen_store.py
def get_service_connector_type(
    self,
    connector_type: str,
) -> ServiceConnectorTypeModel:
    """Look up a service connector type in the connector registry.

    Args:
        connector_type: the service connector type identifier.

    Returns:
        The requested service connector type.
    """
    registry = service_connector_registry
    return registry.get_service_connector_type(connector_type)
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Fetch a stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.

    Raises:
        KeyError: if the stack doesn't exist.
    """
    with Session(self.engine) as session:
        stack_schema = session.exec(
            select(StackSchema).where(StackSchema.id == stack_id)
        ).first()

        if stack_schema is not None:
            return stack_schema.to_model()

        raise KeyError(f"Stack with ID {stack_id} not found.")
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_stack_component(
    self, component_id: UUID
) -> ComponentResponseModel:
    """Fetch a stack component by its ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
    with Session(self.engine) as session:
        component_schema = session.exec(
            select(StackComponentSchema).where(
                StackComponentSchema.id == component_id
            )
        ).first()

        if component_schema is not None:
            return component_schema.to_model()

        raise KeyError(
            f"Stack component with ID {component_id} not found."
        )
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Exceptions:

Type Description
KeyError

If the deployment ID could not be loaded from the database.

Source code in zenml/zen_stores/sql_zen_store.py
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.

    Raises:
        KeyError: If the deployment ID could not be loaded from the
            database.
    """
    info = super().get_store_info()
    info.database_type = ServerDatabaseType(
        make_url(self.config.url).drivername
    )

    # The authoritative deployment ID lives in the database, so override
    # the value inherited from the global configuration.
    with Session(self.engine) as session:
        identity = session.exec(select(IdentitySchema)).first()

        if identity is None:
            raise KeyError(
                "The deployment ID could not be loaded from the database."
            )
        info.id = identity.id

    return info
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Source code in zenml/zen_stores/sql_zen_store.py
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Look up a team by its name or ID.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.
    """
    with Session(self.engine) as session:
        team_schema = self._get_team_schema(
            team_name_or_id, session=session
        )
        return team_schema.to_model()
get_team_role_assignment(self, team_role_assignment_id)

Gets a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The requested role assignment.

Exceptions:

Type Description
KeyError

If no role assignment with the given ID exists.

Source code in zenml/zen_stores/sql_zen_store.py
def get_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> TeamRoleAssignmentResponseModel:
    """Gets a specific role assignment.

    Args:
        team_role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.

    Raises:
        KeyError: If no role assignment with the given ID exists.
    """
    with Session(self.engine) as session:
        team_role = session.exec(
            select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.id == team_role_assignment_id
            )
        ).one_or_none()

        # Guard clause on the missing case, consistent with the
        # raise-on-None pattern used by the other getters in this store.
        if team_role is None:
            raise KeyError(
                f"Unable to get team role assignment with ID "
                f"'{team_role_assignment_id}': No team role assignment "
                f"with this ID found."
            )

        return team_role.to_model()
get_user(self, user_name_or_id=None, include_private=False)

Gets a specific user. When no ID is specified, the active user is returned.

Raises a KeyError if a user with that ID does not exist.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

None
include_private bool

Whether to include private user information

False

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_user(
    self,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    include_private: bool = False,
) -> UserResponseModel:
    """Gets a specific user, when no id is specified the active user is returned.

    Raises a KeyError in case a user with that id does not exist.

    Args:
        user_name_or_id: The name or ID of the user to get.
        include_private: Whether to include private user information

    Returns:
        The requested user, if it was found.
    """
    # Fall back to the default user when no identifier was given.
    identifier = user_name_or_id or self._default_user_name

    with Session(self.engine) as session:
        user_schema = self._get_user_schema(identifier, session=session)
        return user_schema.to_model(include_private=include_private)
get_user_role_assignment(self, user_role_assignment_id)

Gets a role assignment by ID.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The role assignment.

Exceptions:

Type Description
KeyError

If the role assignment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> UserRoleAssignmentResponseModel:
    """Gets a role assignment by ID.

    Args:
        user_role_assignment_id: ID of the role assignment to get.

    Returns:
        The role assignment.

    Raises:
        KeyError: If the role assignment does not exist.
    """
    with Session(self.engine) as session:
        user_role = session.exec(
            select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.id == user_role_assignment_id
            )
        ).one_or_none()

        # Guard clause on the missing case, consistent with the
        # raise-on-None pattern used by the other getters in this store.
        if user_role is None:
            raise KeyError(
                f"Unable to get user role assignment with ID "
                f"'{user_role_assignment_id}': No user role assignment "
                f"with this ID found."
            )

        return user_role.to_model()
get_workspace(self, workspace_name_or_id)

Get an existing workspace by name or ID.

Parameters:

Name Type Description Default
workspace_name_or_id Union[str, uuid.UUID]

Name or ID of the workspace to get.

required

Returns:

Type Description
WorkspaceResponseModel

The requested workspace if one was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_workspace(
    self, workspace_name_or_id: Union[str, UUID]
) -> WorkspaceResponseModel:
    """Get an existing workspace by name or ID.

    Args:
        workspace_name_or_id: Name or ID of the workspace to get.

    Returns:
        The requested workspace if one was found.
    """
    with Session(self.engine) as session:
        workspace = self._get_workspace_schema(
            workspace_name_or_id, session=session
        )
        # Convert while the session is still open: converting a schema
        # object after its session closes risks DetachedInstanceError if
        # the conversion touches lazy-loaded relationships. This also
        # matches every other getter in this store.
        return workspace.to_model()
list_artifacts(self, artifact_filter_model)

List all artifacts matching the given filter criteria.

Parameters:

Name Type Description Default
artifact_filter_model ArtifactFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ArtifactResponseModel]

A list of all artifacts matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_artifacts(
    self, artifact_filter_model: ArtifactFilterModel
) -> Page[ArtifactResponseModel]:
    """List all artifacts matching the given filter criteria.

    Args:
        artifact_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all artifacts matching the filter criteria.
    """
    query = select(ArtifactSchema)
    if artifact_filter_model.only_unused:
        # Restrict to artifacts that are neither an output nor an input
        # of any step run.
        query = query.where(
            ArtifactSchema.id.notin_(  # type: ignore[attr-defined]
                select(StepRunOutputArtifactSchema.artifact_id)
            )
        ).where(
            ArtifactSchema.id.notin_(  # type: ignore[attr-defined]
                select(StepRunInputArtifactSchema.artifact_id)
            )
        )

    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=ArtifactSchema,
            filter_model=artifact_filter_model,
            custom_schema_to_model_conversion=self._artifact_schema_to_model,
        )
list_builds(self, build_filter_model)

List all builds matching the given filter criteria.

Parameters:

Name Type Description Default
build_filter_model PipelineBuildFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineBuildResponseModel]

A page of all builds matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_builds(
    self, build_filter_model: PipelineBuildFilterModel
) -> Page[PipelineBuildResponseModel]:
    """Fetch a page of pipeline builds matching the filter criteria.

    Args:
        build_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all builds matching the filter criteria.
    """
    query = select(PipelineBuildSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=PipelineBuildSchema,
            filter_model=build_filter_model,
        )
list_code_repositories(self, filter_model)

List all code repositories.

Parameters:

Name Type Description Default
filter_model CodeRepositoryFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[CodeRepositoryResponseModel]

A page of all code repositories.

Source code in zenml/zen_stores/sql_zen_store.py
def list_code_repositories(
    self, filter_model: CodeRepositoryFilterModel
) -> Page[CodeRepositoryResponseModel]:
    """Fetch a page of code repositories matching the filter criteria.

    Args:
        filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all code repositories.
    """
    query = select(CodeRepositorySchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=CodeRepositorySchema,
            filter_model=filter_model,
        )
list_deployments(self, deployment_filter_model)

List all deployments matching the given filter criteria.

Parameters:

Name Type Description Default
deployment_filter_model PipelineDeploymentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineDeploymentResponseModel]

A page of all deployments matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_deployments(
    self, deployment_filter_model: PipelineDeploymentFilterModel
) -> Page[PipelineDeploymentResponseModel]:
    """Fetch a page of pipeline deployments matching the filter criteria.

    Args:
        deployment_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all deployments matching the filter criteria.
    """
    query = select(PipelineDeploymentSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=PipelineDeploymentSchema,
            filter_model=deployment_filter_model,
        )
list_flavors(self, flavor_filter_model)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
flavor_filter_model FlavorFilterModel

All filter parameters including pagination params

required

Returns:

Type Description
Page[FlavorResponseModel]

List of all the stack component flavors matching the given criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_flavors(
    self, flavor_filter_model: FlavorFilterModel
) -> Page[FlavorResponseModel]:
    """Fetch a page of stack component flavors matching the filter criteria.

    Args:
        flavor_filter_model: All filter parameters including pagination
            params

    Returns:
        List of all the stack component flavors matching the given criteria.
    """
    query = select(FlavorSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=FlavorSchema,
            filter_model=flavor_filter_model,
        )
list_pipelines(self, pipeline_filter_model)

List all pipelines matching the given filter criteria.

Parameters:

Name Type Description Default
pipeline_filter_model PipelineFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineResponseModel]

A list of all pipelines matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_pipelines(
    self, pipeline_filter_model: PipelineFilterModel
) -> Page[PipelineResponseModel]:
    """Fetch a page of pipelines matching the filter criteria.

    Args:
        pipeline_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all pipelines matching the filter criteria.
    """
    query = select(PipelineSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=PipelineSchema,
            filter_model=pipeline_filter_model,
        )
list_roles(self, role_filter_model)

List all roles matching the given filter criteria.

Parameters:

Name Type Description Default
role_filter_model RoleFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RoleResponseModel]

A list of all roles matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_roles(
    self, role_filter_model: RoleFilterModel
) -> Page[RoleResponseModel]:
    """Fetch a page of roles matching the filter criteria.

    Args:
        role_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all roles matching the filter criteria.
    """
    query = select(RoleSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=RoleSchema,
            filter_model=role_filter_model,
        )
list_run_metadata(self, run_metadata_filter_model)

List run metadata.

Parameters:

Name Type Description Default
run_metadata_filter_model RunMetadataFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RunMetadataResponseModel]

The run metadata.

Source code in zenml/zen_stores/sql_zen_store.py
def list_run_metadata(
    self,
    run_metadata_filter_model: RunMetadataFilterModel,
) -> Page[RunMetadataResponseModel]:
    """Fetch a page of run metadata matching the filter criteria.

    Args:
        run_metadata_filter_model: All filter parameters including
            pagination params.

    Returns:
        The run metadata.
    """
    query = select(RunMetadataSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=RunMetadataSchema,
            filter_model=run_metadata_filter_model,
        )
list_run_steps(self, step_run_filter_model)

List all step runs matching the given filter criteria.

Parameters:

Name Type Description Default
step_run_filter_model StepRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[StepRunResponseModel]

A list of all step runs matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_run_steps(
    self, step_run_filter_model: StepRunFilterModel
) -> Page[StepRunResponseModel]:
    """Fetch a page of step runs matching the filter criteria.

    Args:
        step_run_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all step runs matching the filter criteria.
    """
    query = select(StepRunSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=StepRunSchema,
            filter_model=step_run_filter_model,
            custom_schema_to_model_conversion=self._run_step_schema_to_model,
        )
list_runs(self, runs_filter_model)

List all pipeline runs matching the given filter criteria.

Parameters:

Name Type Description Default
runs_filter_model PipelineRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineRunResponseModel]

A list of all pipeline runs matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_runs(
    self, runs_filter_model: PipelineRunFilterModel
) -> Page[PipelineRunResponseModel]:
    """Fetch a page of pipeline runs matching the filter criteria.

    Args:
        runs_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all pipeline runs matching the filter criteria.
    """
    query = select(PipelineRunSchema)
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=PipelineRunSchema,
            filter_model=runs_filter_model,
            custom_schema_to_model_conversion=self._run_schema_to_model,
        )
list_schedules(self, schedule_filter_model)

List all schedules in the workspace.

Parameters:

Name Type Description Default
schedule_filter_model ScheduleFilterModel

All filter parameters including pagination params

required

Returns:

Type Description
Page[ScheduleResponseModel]

A list of schedules.

Source code in zenml/zen_stores/sql_zen_store.py
def list_schedules(
    self, schedule_filter_model: ScheduleFilterModel
) -> Page[ScheduleResponseModel]:
    """Fetch a page of schedules in the workspace.

    Args:
        schedule_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of matching schedules.
    """
    with Session(self.engine) as session:
        # Standard filter-and-paginate flow; no custom conversion needed.
        return self.filter_and_paginate(
            session=session,
            query=select(ScheduleSchema),
            table=ScheduleSchema,
            filter_model=schedule_filter_model,
        )
list_service_connector_resources(self, user_name_or_id, workspace_name_or_id, connector_type=None, resource_type=None, resource_id=None)

List resources that can be accessed by service connectors.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to scope to.

required
workspace_name_or_id Union[str, uuid.UUID]

The name or ID of the workspace to scope to.

required
connector_type Optional[str]

The type of service connector to scope to.

None
resource_type Optional[str]

The type of resource to scope to.

None
resource_id Optional[str]

The ID of the resource to scope to.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorResourcesModel]

The matching list of resources that available service connectors have access to.

Source code in zenml/zen_stores/sql_zen_store.py
def list_service_connector_resources(
    self,
    user_name_or_id: Union[str, UUID],
    workspace_name_or_id: Union[str, UUID],
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> List[ServiceConnectorResourcesModel]:
    """List resources that can be accessed by service connectors.

    Considers both connectors shared within the given workspace and the
    given user's private connectors in that workspace. For connector
    types registered locally, the connector is instantiated and its
    resources are actively verified; for types not available locally,
    only rudimentary information extracted from the stored connector
    model is returned.

    Args:
        user_name_or_id: The name or ID of the user to scope to.
        workspace_name_or_id: The name or ID of the workspace to scope to.
        connector_type: The type of service connector to scope to.
        resource_type: The type of resource to scope to.
        resource_id: The ID of the resource to scope to.

    Returns:
        The matching list of resources that available service
        connectors have access to.
    """
    user = self.get_user(user_name_or_id)
    workspace = self.get_workspace(workspace_name_or_id)

    # First pass: connectors shared with everyone in the workspace.
    connector_filter_model = ServiceConnectorFilterModel(
        connector_type=connector_type,
        resource_type=resource_type,
        is_shared=True,
        workspace_id=workspace.id,
    )

    shared_connectors = self.list_service_connectors(
        filter_model=connector_filter_model
    ).items

    # Second pass: connectors private to the given user in the workspace.
    connector_filter_model = ServiceConnectorFilterModel(
        connector_type=connector_type,
        resource_type=resource_type,
        is_shared=False,
        user_id=user.id,
        workspace_id=workspace.id,
    )

    private_connectors = self.list_service_connectors(
        filter_model=connector_filter_model
    ).items

    resource_list: List[ServiceConnectorResourcesModel] = []

    for connector in list(shared_connectors) + list(private_connectors):
        if not service_connector_registry.is_registered(connector.type):
            # The connector type is not available locally, so we cannot
            # instantiate the connector to actively discover the
            # resources it has access to. Fall back to the rudimentary
            # information stored in the connector model itself.

            if resource_id and connector.resource_id != resource_id:
                # If an explicit resource ID is required, the connector
                # has to be configured with it.
                continue

            resources = (
                ServiceConnectorResourcesModel.from_connector_model(
                    connector,
                    resource_type=resource_type,
                )
            )
            # Flag resources whose IDs could not be determined from the
            # model alone.
            for r in resources.resources:
                if not r.resource_ids:
                    r.error = (
                        f"The service '{connector.type}' connector type is "
                        "not available."
                    )

        else:
            # The connector type is registered locally: instantiate the
            # connector and actively verify which resources it can reach.
            try:
                connector_instance = (
                    service_connector_registry.instantiate_connector(
                        model=connector
                    )
                )

                resources = connector_instance.verify(
                    resource_type=resource_type,
                    resource_id=resource_id,
                    list_resources=True,
                )
            except (ValueError, AuthorizationException) as e:
                # Best-effort listing: skip connectors that fail
                # verification instead of failing the whole call.
                error = (
                    f'Failed to fetch {resource_type or "available"} '
                    f"resources from service connector {connector.name}/"
                    f"{connector.id}: {e}"
                )
                # Log an exception if debug logging is enabled
                if logger.isEnabledFor(logging.DEBUG):
                    logger.exception(error)
                else:
                    logger.error(error)
                continue

        resource_list.append(resources)

    return resource_list
list_service_connector_types(self, connector_type=None, resource_type=None, auth_method=None)

Get a list of service connector types.

Parameters:

Name Type Description Default
connector_type Optional[str]

Filter by connector type.

None
resource_type Optional[str]

Filter by resource type.

None
auth_method Optional[str]

Filter by authentication method.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorTypeModel]

List of service connector types.

Source code in zenml/zen_stores/sql_zen_store.py
def list_service_connector_types(
    self,
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    auth_method: Optional[str] = None,
) -> List[ServiceConnectorTypeModel]:
    """Get a list of service connector types.

    This is a pure delegation to the local service connector registry;
    no database access is involved.

    Args:
        connector_type: Filter by connector type.
        resource_type: Filter by resource type.
        auth_method: Filter by authentication method.

    Returns:
        List of service connector types.
    """
    # Forward all filters unchanged to the registry.
    return service_connector_registry.list_service_connector_types(
        connector_type=connector_type,
        resource_type=resource_type,
        auth_method=auth_method,
    )
list_service_connectors(self, filter_model)

List all service connectors.

Parameters:

Name Type Description Default
filter_model ServiceConnectorFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ServiceConnectorResponseModel]

A page of all service connectors.

Source code in zenml/zen_stores/sql_zen_store.py
def list_service_connectors(
    self, filter_model: ServiceConnectorFilterModel
) -> Page[ServiceConnectorResponseModel]:
    """List all service connectors.

    Args:
        filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all service connectors.
    """

    def custom_fetch(
        session: Session,
        query: Union[
            Select[ServiceConnectorSchema],
            SelectOfScalar[ServiceConnectorSchema],
        ],
        filter_model: BaseFilterModel,
    ) -> List[ServiceConnectorSchema]:
        """Fetch connectors with resource-type and label filters applied.

        Args:
            session: The database session.
            query: The query to filter.
            filter_model: The filter model.

        Returns:
            The filtered and paginated results.
        """
        # The generic filter model cannot express connector-specific
        # filters, so apply them through the dedicated helper.
        assert isinstance(filter_model, ServiceConnectorFilterModel)
        return self._list_filtered_service_connectors(
            session=session, query=query, filter_model=filter_model
        )

    with Session(self.engine) as session:
        connectors: Page[
            ServiceConnectorResponseModel
        ] = self.filter_and_paginate(
            session=session,
            query=select(ServiceConnectorSchema),
            table=ServiceConnectorSchema,
            filter_model=filter_model,
            custom_fetch=custom_fetch,
        )

        # Connector type metadata is not stored in the DB; attach it
        # to each returned connector before handing the page back.
        self._populate_connector_type(*connectors.items)
        return connectors
list_stack_components(self, component_filter_model)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
component_filter_model ComponentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ComponentResponseModel]

A list of all stack components matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_stack_components(
    self, component_filter_model: ComponentFilterModel
) -> Page[ComponentResponseModel]:
    """List all stack components matching the given filter criteria.

    Args:
        component_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of stack components matching the filter criteria.
    """
    with Session(self.engine) as session:
        components: Page[
            ComponentResponseModel
        ] = self.filter_and_paginate(
            session=session,
            query=select(StackComponentSchema),
            table=StackComponentSchema,
            filter_model=component_filter_model,
        )
        return components
list_stacks(self, stack_filter_model)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
stack_filter_model StackFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[StackResponseModel]

A list of all stacks matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_stacks(
    self, stack_filter_model: StackFilterModel
) -> Page[StackResponseModel]:
    """List all stacks matching the given filter criteria.

    Args:
        stack_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of stacks matching the filter criteria.
    """
    with Session(self.engine) as session:
        query = select(StackSchema)
        component_id = stack_filter_model.component_id
        if component_id:
            # Restrict to stacks that contain the given component by
            # joining through the stack/component association table.
            query = query.where(
                StackCompositionSchema.stack_id == StackSchema.id,
                StackCompositionSchema.component_id == component_id,
            )
        return self.filter_and_paginate(
            session=session,
            query=query,
            table=StackSchema,
            filter_model=stack_filter_model,
        )
list_team_role_assignments(self, team_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
team_role_assignment_filter_model TeamRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_team_role_assignments(
    self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
) -> Page[TeamRoleAssignmentResponseModel]:
    """List all team role assignments matching the given filter criteria.

    Args:
        team_role_assignment_filter_model: Filter criteria, including
            pagination parameters.

    Returns:
        A page of team role assignments matching the filter criteria.
    """
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=select(TeamRoleAssignmentSchema),
            table=TeamRoleAssignmentSchema,
            filter_model=team_role_assignment_filter_model,
        )
list_teams(self, team_filter_model)

List all teams matching the given filter criteria.

Parameters:

Name Type Description Default
team_filter_model TeamFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamResponseModel]

A list of all teams matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_teams(
    self, team_filter_model: TeamFilterModel
) -> Page[TeamResponseModel]:
    """List all teams matching the given filter criteria.

    Args:
        team_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of teams matching the filter criteria.
    """
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=select(TeamSchema),
            table=TeamSchema,
            filter_model=team_filter_model,
        )
list_user_role_assignments(self, user_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
user_role_assignment_filter_model UserRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_user_role_assignments(
    self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
) -> Page[UserRoleAssignmentResponseModel]:
    """List all user role assignments matching the given filter criteria.

    Args:
        user_role_assignment_filter_model: Filter criteria, including
            pagination parameters.

    Returns:
        A page of user role assignments matching the filter criteria.
    """
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=select(UserRoleAssignmentSchema),
            table=UserRoleAssignmentSchema,
            filter_model=user_role_assignment_filter_model,
        )
list_users(self, user_filter_model)

List all users.

Parameters:

Name Type Description Default
user_filter_model UserFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/sql_zen_store.py
def list_users(
    self, user_filter_model: UserFilterModel
) -> Page[UserResponseModel]:
    """List all users.

    Args:
        user_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of users.
    """
    with Session(self.engine) as session:
        users: Page[UserResponseModel] = self.filter_and_paginate(
            session=session,
            query=select(UserSchema),
            table=UserSchema,
            filter_model=user_filter_model,
        )
        return users
list_workspaces(self, workspace_filter_model)

List all workspaces matching the given filter criteria.

Parameters:

Name Type Description Default
workspace_filter_model WorkspaceFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[WorkspaceResponseModel]

A list of all workspaces matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_workspaces(
    self, workspace_filter_model: WorkspaceFilterModel
) -> Page[WorkspaceResponseModel]:
    """List all workspaces matching the given filter criteria.

    Args:
        workspace_filter_model: Filter criteria, including pagination
            parameters.

    Returns:
        A page of workspaces matching the filter criteria.
    """
    with Session(self.engine) as session:
        return self.filter_and_paginate(
            session=session,
            query=select(WorkspaceSchema),
            table=WorkspaceSchema,
            filter_model=workspace_filter_model,
        )
migrate_database(self)

Migrate the database to the head as defined by the python package.

Source code in zenml/zen_stores/sql_zen_store.py
def migrate_database(self) -> None:
    """Migrate the database to the head as defined by the python package.

    First configures alembic's logger to follow ZenML's logging level,
    then distinguishes three database states (fresh, pre-alembic,
    already migrated) and upgrades accordingly. If any migration
    actually ran, the built-in/integration flavors are re-synced
    afterwards.
    """
    alembic_logger = logging.getLogger("alembic")

    # remove all existing handlers so we fully control alembic's output
    while len(alembic_logger.handlers):
        alembic_logger.removeHandler(alembic_logger.handlers[0])

    logging_level = get_logging_level()

    # suppress alembic info logging if the zenml logging level is not debug
    if logging_level == LoggingLevels.DEBUG:
        alembic_logger.setLevel(logging.DEBUG)
    else:
        alembic_logger.setLevel(logging.WARNING)

    alembic_logger.addHandler(get_console_handler())

    # We need to account for 3 distinct cases here:
    # 1. the database is completely empty (not initialized)
    # 2. the database is not empty, but has never been migrated with alembic
    #   before (i.e. was created with SQLModel back when alembic wasn't
    #   used)
    # 3. the database is not empty and has been migrated with alembic before
    revisions = self.alembic.current_revisions()
    if len(revisions) >= 1:
        if len(revisions) > 1:
            logger.warning(
                "The ZenML database has more than one migration head "
                "revision. This is not expected and might indicate a "
                "database migration problem. Please raise an issue on "
                "GitHub if you encounter this."
            )
        # Case 3: the database has been migrated with alembic before. Just
        # upgrade to the latest revision.
        self.alembic.upgrade()
    else:
        if self.alembic.db_is_empty():
            # Case 1: the database is empty. We can just create the
            # tables from scratch with alembic.
            self.alembic.upgrade()
        else:
            # Case 2: the database is not empty, but has never been
            # migrated with alembic before. We need to create the alembic
            # version table, initialize it with the first revision where we
            # introduced alembic and then upgrade to the latest revision.
            self.alembic.stamp(ZENML_ALEMBIC_START_REVISION)
            self.alembic.upgrade()

    # If an alembic migration took place, all non-custom flavors are purged
    #  and the FlavorRegistry recreates all in-built and integration
    #  flavors in the db.
    revisions_afterwards = self.alembic.current_revisions()

    if revisions != revisions_afterwards:
        self._sync_flavors()
update_code_repository(self, code_repository_id, update)

Updates an existing code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to update.

required
update CodeRepositoryUpdateModel

The update to be applied to the code repository.

required

Returns:

Type Description
CodeRepositoryResponseModel

The updated code repository.

Exceptions:

Type Description
KeyError

If no code repository with the given name exists.

Source code in zenml/zen_stores/sql_zen_store.py
def update_code_repository(
    self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
) -> CodeRepositoryResponseModel:
    """Updates an existing code repository.

    Args:
        code_repository_id: The ID of the code repository to update.
        update: The update to be applied to the code repository.

    Returns:
        The updated code repository.

    Raises:
        KeyError: If no code repository with the given name exists.
    """
    with Session(self.engine) as session:
        # Look up the repository row; fail loudly if it is missing.
        repo = session.exec(
            select(CodeRepositorySchema).where(
                CodeRepositorySchema.id == code_repository_id
            )
        ).first()
        if repo is None:
            raise KeyError(
                f"Unable to update code repository with ID "
                f"{code_repository_id}: No code repository with this ID "
                "found."
            )

        # Apply the changes and persist them.
        repo.update(update)
        session.add(repo)
        session.commit()

        return repo.to_model()
update_flavor(self, flavor_id, flavor_update)

Updates an existing flavor.

Parameters:

Name Type Description Default
flavor_id UUID

The id of the flavor to update.

required
flavor_update FlavorUpdateModel

The update to be applied to the flavor.

required

Returns:

Type Description
FlavorResponseModel

The updated flavor.

Exceptions:

Type Description
KeyError

If no flavor with the given id exists.

Source code in zenml/zen_stores/sql_zen_store.py
def update_flavor(
    self, flavor_id: UUID, flavor_update: FlavorUpdateModel
) -> FlavorResponseModel:
    """Updates an existing flavor.

    Args:
        flavor_id: The id of the flavor to update.
        flavor_update: The update to be applied to the flavor.

    Returns:
        The updated flavor.

    Raises:
        KeyError: If no flavor with the given id exists.
    """
    with Session(self.engine) as session:
        existing_flavor = session.exec(
            select(FlavorSchema).where(FlavorSchema.id == flavor_id)
        ).first()

        if not existing_flavor:
            raise KeyError(f"Flavor with ID {flavor_id} not found.")

        existing_flavor.update(flavor_update=flavor_update)
        session.add(existing_flavor)
        session.commit()

        # Refresh so the returned model reflects any DB-side changes
        # made during the commit.
        session.refresh(existing_flavor)
        return existing_flavor.to_model()
update_pipeline(*args, **kwargs)

Updates a pipeline.

Parameters:

Name Type Description Default
pipeline_id

The ID of the pipeline to be updated.

required
pipeline_update

The update to be applied.

required

Returns:

Type Description
Any

The updated pipeline.

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Wrap the decorated function with analytics event tracking.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Whatever the wrapped function returns.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Metadata collection is best-effort and must never break the
        # actual call, hence the broad exception guards.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_role(*args, **kwargs)

Update an existing role.

Parameters:

Name Type Description Default
role_id

The ID of the role to be updated.

required
role_update

The update to be applied to the role.

required

Returns:

Type Description
Any

The updated role.

Exceptions:

Type Description
KeyError

if the role does not exist.

IllegalOperationError

if the role is a system role.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments passed through to the function.
        **kwargs: Keyword arguments passed through to the function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Never let analytics bookkeeping interfere with the call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for value in list(args) + list(kwargs.values()):
                if isinstance(value, AnalyticsTrackedModelMixin):
                    handler.metadata = value.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
    with Session(self.engine) as session:
        # Look up the run row; fail loudly if it is missing.
        run_schema = session.exec(
            select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
        ).first()
        if run_schema is None:
            raise KeyError(
                f"Unable to update pipeline run with ID {run_id}: "
                f"No pipeline run with this ID found."
            )

        # Apply the update and persist it.
        run_schema.update(run_update=run_update)
        session.add(run_schema)
        session.commit()

        # Reload DB-side state before converting to the response model.
        session.refresh(run_schema)
        return self._run_schema_to_model(run_schema)
update_run_step(self, step_run_id, step_run_update)

Updates a step run.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step to update.

required
step_run_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step run.

Exceptions:

Type Description
KeyError

if the step run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def update_run_step(
    self,
    step_run_id: UUID,
    step_run_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Updates a step run.

    Args:
        step_run_id: The ID of the step to update.
        step_run_update: The update to be applied to the step.

    Returns:
        The updated step run.

    Raises:
        KeyError: if the step run doesn't exist.
    """
    with Session(self.engine) as session:
        # Look up the step row; fail loudly if it is missing.
        step_schema = session.exec(
            select(StepRunSchema).where(StepRunSchema.id == step_run_id)
        ).first()
        if step_schema is None:
            raise KeyError(
                f"Unable to update step with ID {step_run_id}: "
                f"No step with this ID found."
            )

        # Apply the core update to the step row.
        step_schema.update(step_run_update)
        session.add(step_schema)

        # Link any updated output artifacts to the step. Input
        # artifacts and parent steps cannot be updated after the step
        # has been created.
        for output_name, output_artifact_id in (
            step_run_update.outputs.items()
        ):
            self._set_run_step_output_artifact(
                step_run_id=step_run_id,
                artifact_id=output_artifact_id,
                name=output_name,
                session=session,
            )

        session.commit()
        session.refresh(step_schema)

        return self._run_step_schema_to_model(step_schema)
update_schedule(self, schedule_id, schedule_update)

Updates a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to be updated.

required
schedule_update ScheduleUpdateModel

The update to be applied.

required

Returns:

Type Description
ScheduleResponseModel

The updated schedule.

Exceptions:

Type Description
KeyError

if the schedule doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def update_schedule(
    self,
    schedule_id: UUID,
    schedule_update: ScheduleUpdateModel,
) -> ScheduleResponseModel:
    """Updates a schedule.

    Args:
        schedule_id: The ID of the schedule to be updated.
        schedule_update: The update to be applied.

    Returns:
        The updated schedule.

    Raises:
        KeyError: if the schedule doesn't exist.
    """
    with Session(self.engine) as session:
        # Look up the schedule row; fail loudly if it is missing.
        schedule = session.exec(
            select(ScheduleSchema).where(ScheduleSchema.id == schedule_id)
        ).first()
        if schedule is None:
            raise KeyError(
                f"Unable to update schedule with ID {schedule_id}: "
                f"No schedule with this ID found."
            )

        # `from_update_model` returns the updated schema instance,
        # which is then persisted and converted to the response model.
        schedule = schedule.from_update_model(schedule_update)
        session.add(schedule)
        session.commit()
        return schedule.to_model()
update_service_connector(self, service_connector_id, update)

Updates an existing service connector.

The update model contains the fields to be updated. If a field value is set to None in the model, the field is not updated, but there are special rules concerning some fields:

  • the configuration and secrets fields together represent a full valid configuration update, not just a partial update. If either is set (i.e. not None) in the update, their values are merged together and will replace the existing configuration and secrets values.
  • the resource_id field value is also a full replacement value: if set to None, the resource ID is removed from the service connector.
  • the expiration_seconds field value is also a full replacement value: if set to None, the expiration is removed from the service connector.
  • the secret_id field value in the update is ignored, given that secrets are managed internally by the ZenML store.
  • the labels field is also a full labels update: if set (i.e. not None), all existing labels are removed and replaced by the new labels in the update.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to update.

required
update ServiceConnectorUpdateModel

The update to be applied to the service connector.

required

Returns:

Type Description
ServiceConnectorResponseModel

The updated service connector.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

IllegalOperationError

If the service connector is referenced by one or more stack components and the update would change the connector type, resource type or resource ID.

Source code in zenml/zen_stores/sql_zen_store.py
def update_service_connector(
    self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
) -> ServiceConnectorResponseModel:
    """Updates an existing service connector.

    The update model contains the fields to be updated. If a field value is
    set to None in the model, the field is not updated, but there are
    special rules concerning some fields:

    * the `configuration` and `secrets` fields together represent a full
    valid configuration update, not just a partial update. If either is
    set (i.e. not None) in the update, their values are merged together and
    will replace the existing configuration and secrets values.
    * the `resource_id` field value is also a full replacement value: if set
    to `None`, the resource ID is removed from the service connector.
    * the `expiration_seconds` field value is also a full replacement value:
    if set to `None`, the expiration is removed from the service connector.
    * the `secret_id` field value in the update is ignored, given that
    secrets are managed internally by the ZenML store.
    * the `labels` field is also a full labels update: if set (i.e. not
    `None`), all existing labels are removed and replaced by the new labels
    in the update.

    Args:
        service_connector_id: The ID of the service connector to update.
        update: The update to be applied to the service connector.

    Returns:
        The updated service connector.

    Raises:
        KeyError: If no service connector with the given ID exists.
        IllegalOperationError: If the service connector is referenced by
            one or more stack components and the update would change the
            connector type, resource type or resource ID.
    """
    with Session(self.engine) as session:
        existing_connector = session.exec(
            select(ServiceConnectorSchema).where(
                ServiceConnectorSchema.id == service_connector_id
            )
        ).first()

        if existing_connector is None:
            raise KeyError(
                f"Unable to update service connector with ID "
                f"'{service_connector_id}': Found no existing service "
                "connector with this ID."
            )

        # In case of a renaming update, make sure no service connector uses
        # that name already
        if update.name:
            if (
                existing_connector.name != update.name
                and existing_connector.user_id is not None
            ):
                self._fail_if_service_connector_with_name_exists_for_user(
                    name=update.name,
                    workspace_id=existing_connector.workspace_id,
                    user_id=existing_connector.user_id,
                    session=session,
                )

        # Check if service connector update makes the service connector a
        # shared service connector
        # In that case, check if a service connector with the same name is
        # already shared within the workspace
        if update.is_shared is not None:
            if not existing_connector.is_shared and update.is_shared:
                self._fail_if_service_connector_with_name_already_shared(
                    name=update.name or existing_connector.name,
                    workspace_id=existing_connector.workspace_id,
                    session=session,
                )

        existing_connector_model = existing_connector.to_model()

        if len(existing_connector.components):
            # If the service connector is already used in one or more
            # stack components, the update is no longer allowed to change
            # the service connector's authentication method, connector type,
            # resource type, or resource ID
            if (
                update.connector_type
                # Fix: compare the `connector_type` field that the guard
                # above checks, instead of the unrelated `type` attribute.
                and update.connector_type
                != existing_connector_model.connector_type
            ):
                raise IllegalOperationError(
                    "The service type of a service connector that is "
                    "already actively used in one or more stack components "
                    "cannot be changed."
                )

            if (
                update.auth_method
                and update.auth_method
                != existing_connector_model.auth_method
            ):
                raise IllegalOperationError(
                    "The authentication method of a service connector that "
                    "is already actively used in one or more stack "
                    "components cannot be changed."
                )

            if (
                update.resource_types
                and update.resource_types
                != existing_connector_model.resource_types
            ):
                raise IllegalOperationError(
                    "The resource type of a service connector that is "
                    "already actively used in one or more stack components "
                    "cannot be changed."
                )

            # The resource ID field cannot be used as a partial update: if
            # set to None, the existing resource ID is also removed
            if update.resource_id != existing_connector_model.resource_id:
                raise IllegalOperationError(
                    "The resource ID of a service connector that is "
                    "already actively used in one or more stack components "
                    "cannot be changed."
                )

        # If the connector type is locally available, we validate the update
        # against the connector type schema before storing it in the
        # database
        if service_connector_registry.is_registered(
            existing_connector.connector_type
        ):
            connector_type = (
                service_connector_registry.get_service_connector_type(
                    existing_connector.connector_type
                )
            )
            # We need the auth method to be set to be able to validate the
            # configuration
            update.auth_method = (
                update.auth_method or existing_connector_model.auth_method
            )
            # Validate the configuration update. If the configuration or
            # secrets fields are set, together they are merged into a
            # full configuration that is validated against the connector
            # type schema and replaces the existing configuration and
            # secrets values
            update.validate_and_configure_resources(
                connector_type=connector_type,
                resource_types=update.resource_types,
                resource_id=update.resource_id,
                configuration=update.configuration,
                secrets=update.secrets,
            )

        # Update secret
        secret_id = self._update_connector_secret(
            existing_connector=existing_connector_model,
            updated_connector=update,
        )

        existing_connector.update(
            connector_update=update, secret_id=secret_id
        )
        session.add(existing_connector)
        session.commit()

        connector = existing_connector.to_model()
        self._populate_connector_type(connector)
        return connector
update_stack(*args, **kwargs)

Update a stack.

Parameters:

Name Type Description Default
stack_id

The ID of the stack to update.

required
stack_update

The update request on the stack.

required

Returns:

Type Description
Any

The updated stack.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

IllegalOperationError

if the stack is a default stack.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics bookkeeping: failures here are logged at
        # debug level and must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when it is trackable.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_stack_component(*args, **kwargs)

Update an existing stack component.

Parameters:

Name Type Description Default
component_id

The ID of the stack component to update.

required
component_update

The update to be applied to the stack component.

required

Returns:

Type Description
Any

The updated stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

IllegalOperationError

if the stack component is a default stack component.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics bookkeeping: failures here are logged at
        # debug level and must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when it is trackable.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_team(*args, **kwargs)

Update an existing team.

Parameters:

Name Type Description Default
team_id

The ID of the team to be updated.

required
team_update

The update to be applied to the team.

required

Returns:

Type Description
Any

The updated team.

Exceptions:

Type Description
KeyError

if the team does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics bookkeeping: failures here are logged at
        # debug level and must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when it is trackable.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_user(*args, **kwargs)

Updates an existing user.

Parameters:

Name Type Description Default
user_id

The id of the user to update.

required
user_update

The update to be applied to the user.

required

Returns:

Type Description
Any

The updated user.

Exceptions:

Type Description
IllegalOperationError

If the request tries to update the username for the default user account.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics bookkeeping: failures here are logged at
        # debug level and must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when it is trackable.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
update_workspace(*args, **kwargs)

Update an existing workspace.

Parameters:

Name Type Description Default
workspace_id

The ID of the workspace to be updated.

required
workspace_update

The update to be applied to the workspace.

required

Returns:

Type Description
Any

The updated workspace.

Exceptions:

Type Description
IllegalOperationError

if the workspace is the default workspace.

KeyError

if the workspace does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Invoke the wrapped function while emitting an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        The wrapped function's return value.
    """
    with event_handler(event=event, v1=v1, v2=v2) as handler:
        # Best-effort analytics bookkeeping: failures here are logged at
        # debug level and must never break the wrapped call itself.
        try:
            if args and isinstance(args[0], AnalyticsTrackerMixin):
                handler.tracker = args[0]

            for candidate in (*args, *kwargs.values()):
                if isinstance(candidate, AnalyticsTrackedModelMixin):
                    handler.metadata = candidate.get_analytics_metadata()
                    break
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        result = func(*args, **kwargs)

        # Prefer metadata derived from the result when it is trackable.
        try:
            if isinstance(result, AnalyticsTrackedModelMixin):
                handler.metadata = result.get_analytics_metadata()
        except Exception as e:
            logger.debug(f"Analytics tracking failure for {func}: {e}")

        return result
verify_service_connector(self, service_connector_id, resource_type=None, resource_id=None, list_resources=True)

Verifies if a service connector instance has access to one or more resources.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to verify.

required
resource_type Optional[str]

The type of resource to verify access to.

None
resource_id Optional[str]

The ID of the resource to verify access to.

None
list_resources bool

If True, the list of all resources accessible through the service connector and matching the supplied resource type and ID are returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector has access to, scoped to the supplied resource type and ID, if provided.

Source code in zenml/zen_stores/sql_zen_store.py
def verify_service_connector(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector instance has access to one or more resources.

    Args:
        service_connector_id: The ID of the service connector to verify.
        resource_type: The type of resource to verify access to.
        resource_id: The ID of the resource to verify access to.
        list_resources: If True, also enumerate every resource reachable
            through the service connector that matches the supplied
            resource type and ID.

    Returns:
        The resources the service connector has access to, scoped to the
        supplied resource type and ID, if provided.
    """
    # Look up the stored connector model, then hand it to the registry to
    # build a live connector instance that performs the verification.
    connector_model = self.get_service_connector(service_connector_id)
    connector_instance = service_connector_registry.instantiate_connector(
        model=connector_model
    )
    return connector_instance.verify(
        resource_type=resource_type,
        resource_id=resource_id,
        list_resources=list_resources,
    )
verify_service_connector_config(self, service_connector, list_resources=True)

Verifies if a service connector configuration has access to resources.

Parameters:

Name Type Description Default
service_connector ServiceConnectorRequestModel

The service connector configuration to verify.

required
list_resources bool

If True, the list of all resources accessible through the service connector is returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector configuration has access to.

Source code in zenml/zen_stores/sql_zen_store.py
def verify_service_connector_config(
    self,
    service_connector: ServiceConnectorRequestModel,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector configuration has access to resources.

    Args:
        service_connector: The service connector configuration to verify.
        list_resources: If True, also enumerate every resource reachable
            through the service connector.

    Returns:
        The resources the service connector configuration has access to.
    """
    # Build a live connector from the (not yet persisted) configuration
    # and delegate the actual verification to it.
    instance = service_connector_registry.instantiate_connector(
        model=service_connector
    )
    return instance.verify(list_resources=list_resources)

SqlZenStoreConfiguration (StoreConfiguration) pydantic-model

SQL ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The configuration of the secrets store to use. This defaults to a SQL secrets store that extends the SQL ZenML store.

driver Optional[zenml.zen_stores.sql_zen_store.SQLDatabaseDriver]

The SQL database driver.

database Optional[str]

database name. If not already present on the server, it will be created automatically on first access.

username Optional[str]

The database username.

password Optional[str]

The database password.

ssl_ca Optional[str]

certificate authority certificate. Required for SSL enabled authentication if the CA certificate is not part of the certificates shipped by the operating system.

ssl_cert Optional[str]

client certificate. Required for SSL enabled authentication if client certificates are used.

ssl_key Optional[str]

client certificate private key. Required for SSL enabled authentication if client certificates are used.

ssl_verify_server_cert bool

set to verify the identity of the server against the provided server certificate.

pool_size int

The maximum number of connections to keep in the SQLAlchemy pool.

max_overflow int

The maximum number of connections to allow in the SQLAlchemy pool in addition to the pool_size.

pool_pre_ping bool

Enable emitting a test statement on the SQL connection at the start of each connection pool checkout, to test that the database connection is still viable.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStoreConfiguration(StoreConfiguration):
    """SQL ZenML store configuration.

    Attributes:
        type: The type of the store.
        secrets_store: The configuration of the secrets store to use.
            This defaults to a SQL secrets store that extends the SQL ZenML
            store.
        driver: The SQL database driver.
        database: database name. If not already present on the server, it will
            be created automatically on first access.
        username: The database username.
        password: The database password.
        ssl_ca: certificate authority certificate. Required for SSL
            enabled authentication if the CA certificate is not part of the
            certificates shipped by the operating system.
        ssl_cert: client certificate. Required for SSL enabled
            authentication if client certificates are used.
        ssl_key: client certificate private key. Required for SSL enabled
            authentication if client certificates are used.
        ssl_verify_server_cert: set to verify the identity of the server
            against the provided server certificate.
        pool_size: The maximum number of connections to keep in the SQLAlchemy
            pool.
        max_overflow: The maximum number of connections to allow in the
            SQLAlchemy pool in addition to the pool_size.
        pool_pre_ping: Enable emitting a test statement on the SQL connection
            at the start of each connection pool checkout, to test that the
            database connection is still viable.
    """

    type: StoreType = StoreType.SQL

    secrets_store: Optional[SecretsStoreConfiguration] = None

    driver: Optional[SQLDatabaseDriver] = None
    database: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    ssl_ca: Optional[str] = None
    ssl_cert: Optional[str] = None
    ssl_key: Optional[str] = None
    ssl_verify_server_cert: bool = False
    pool_size: int = 20
    max_overflow: int = 20
    pool_pre_ping: bool = True

    @validator("secrets_store")
    def validate_secrets_store(
        cls, secrets_store: Optional[SecretsStoreConfiguration]
    ) -> SecretsStoreConfiguration:
        """Ensures that the secrets store is initialized with a default SQL secrets store.

        Args:
            secrets_store: The secrets store config to be validated.

        Returns:
            The validated secrets store config.
        """
        if secrets_store is None:
            secrets_store = SqlSecretsStoreConfiguration()

        return secrets_store

    @root_validator(pre=True)
    def _remove_grpc_attributes(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Removes old GRPC attributes.

        Args:
            values: All model attribute values.

        Returns:
            The model attribute values, stripped of the deprecated GRPC
            attributes.
        """
        grpc_attribute_keys = [
            "grpc_metadata_host",
            "grpc_metadata_port",
            "grpc_metadata_ssl_ca",
            "grpc_metadata_ssl_key",
            "grpc_metadata_ssl_cert",
        ]
        # Remove every deprecated attribute, remembering which ones were
        # actually set so the warning can name them.
        set_grpc_keys = [
            key for key in grpc_attribute_keys if values.pop(key, None)
        ]
        if set_grpc_keys:
            # Fix: the `%s` placeholder previously had no argument and was
            # logged literally; pass the offending keys lazily.
            logger.warning(
                "The GRPC attributes %s are unused and will be removed soon. "
                "Please remove them from SQLZenStore configuration. This will "
                "become an error in future versions of ZenML.",
                set_grpc_keys,
            )

        return values

    @root_validator
    def _validate_url(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the SQL URL.

        The validator also moves the MySQL username, password and database
        parameters from the URL into the other configuration arguments, if they
        are present in the URL.

        Args:
            values: The values to validate.

        Returns:
            The validated values.

        Raises:
            ValueError: If the URL is invalid or the SQL driver is not
                supported.
        """
        url = values.get("url")
        if url is None:
            return values

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        try:
            sql_url = make_url(url)
        except ArgumentError as e:
            # Fix: these raises previously passed logging-style `%s`
            # arguments, which ValueError does not interpolate (the message
            # became a tuple); use f-strings instead.
            raise ValueError(
                f"Invalid SQL URL `{url}`: {e}. The URL must be in the "
                "format `driver://[[username:password@]hostname:port]"
                "/database[?<extra-args>]`."
            ) from e

        if sql_url.drivername not in SQLDatabaseDriver.values():
            raise ValueError(
                f"Invalid SQL driver value `{url}`: The driver must be one "
                f"of: {', '.join(SQLDatabaseDriver.values())}."
            )
        values["driver"] = SQLDatabaseDriver(sql_url.drivername)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            if (
                sql_url.username
                or sql_url.password
                or sql_url.query
                or sql_url.database is None
            ):
                raise ValueError(
                    f"Invalid SQLite URL `{url}`: The URL must be in the "
                    "format `sqlite:///path/to/database.db`."
                )
            if values.get("username") or values.get("password"):
                raise ValueError(
                    "Invalid SQLite configuration: The username and password "
                    f"must not be set for SQLite URL `{url}`."
                )
            values["database"] = sql_url.database
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # Move the credentials and database name out of the URL and into
            # the dedicated configuration attributes.
            if sql_url.username:
                values["username"] = sql_url.username
                sql_url = sql_url._replace(username=None)
            if sql_url.password:
                values["password"] = sql_url.password
                sql_url = sql_url._replace(password=None)
            if sql_url.database:
                values["database"] = sql_url.database
                sql_url = sql_url._replace(database=None)
            if sql_url.query:
                # Only SSL-related query parameters are recognized; they are
                # moved into the corresponding configuration attributes.
                for k, v in sql_url.query.items():
                    if k == "ssl_ca":
                        values["ssl_ca"] = v
                    elif k == "ssl_cert":
                        values["ssl_cert"] = v
                    elif k == "ssl_key":
                        values["ssl_key"] = v
                    elif k == "ssl_verify_server_cert":
                        values["ssl_verify_server_cert"] = v
                    else:
                        raise ValueError(
                            f"Invalid MySQL URL query parameter `{k}`: The "
                            "parameter must be one of: ssl_ca, ssl_cert, "
                            "ssl_key, or ssl_verify_server_cert."
                        )
                sql_url = sql_url._replace(query={})

            database = values.get("database")
            if (
                not values.get("username")
                or not values.get("password")
                or not database
            ):
                raise ValueError(
                    "Invalid MySQL configuration: The username, password and "
                    "database must be set in the URL or as configuration "
                    "attributes"
                )

            # MySQL database identifiers: no path/SQL special characters,
            # at most 64 characters.
            regexp = r"^[^\\/?%*:|\"<>.-]{1,64}$"
            match = re.match(regexp, database)
            if not match:
                raise ValueError(
                    f"The database name does not conform to the required "
                    f"format "
                    f"rules ({regexp}): {database}"
                )

            # Save the certificates in a secure location on disk
            secret_folder = Path(
                GlobalConfiguration().local_stores_path,
                "certificates",
            )
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                content = values.get(key)
                if content and not os.path.isfile(content):
                    fileio.makedirs(str(secret_folder))
                    file_path = Path(secret_folder, f"{key}.pem")
                    with open(file_path, "w") as f:
                        f.write(content)
                    # Owner-only permissions: these files may hold private
                    # key material.
                    file_path.chmod(0o600)
                    values[key] = str(file_path)

        values["url"] = str(sql_url)
        return values

    @staticmethod
    def get_local_url(path: str) -> str:
        """Get a local SQL url for a given local path.

        Args:
            path: The path to the local sqlite file.

        Returns:
            The local SQL url for the given path.
        """
        return f"sqlite:///{path}/{ZENML_SQLITE_DB_FILENAME}"

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return make_url(url).drivername in SQLDatabaseDriver.values()

    def expand_certificates(self) -> None:
        """Expands the SSL certificate attributes.

        Replaces every `ssl_*` attribute that holds a file path with the
        contents of that file, so the configuration is self-contained (e.g.
        when copied into a container image).
        """
        # Load the certificate values back into the configuration
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            file_path = getattr(self, key, None)
            if file_path and os.path.isfile(file_path):
                with open(file_path, "r") as f:
                    setattr(self, key, f.read())

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Copy the store config using a different configuration path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, SqlZenStoreConfiguration)
        config = config.copy()

        if config.driver == SQLDatabaseDriver.MYSQL:
            # Load the certificate values back into the configuration
            config.expand_certificates()

        elif config.driver == SQLDatabaseDriver.SQLITE:
            if load_config_path:
                config.url = cls.get_local_url(str(load_config_path))
            else:
                config.url = cls.get_local_url(config_path)

        return config

    def get_sqlmodel_config(
        self,
    ) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
        """Get the SQLModel engine configuration for the SQL ZenML store.

        Returns:
            The URL and connection arguments for the SQLModel engine.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        sql_url = make_url(self.url)
        sqlalchemy_connect_args: Dict[str, Any] = {}
        engine_args = {}
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            # The following default value is needed for sqlite to avoid the
            # Error:
            #   sqlite3.ProgrammingError: SQLite objects created in a thread can
            #   only be used in that same thread.
            sqlalchemy_connect_args = {"check_same_thread": False}
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            engine_args = {
                "pool_size": self.pool_size,
                "max_overflow": self.max_overflow,
                "pool_pre_ping": self.pool_pre_ping,
            }

            sql_url = sql_url._replace(
                drivername="mysql+pymysql",
                username=self.username,
                password=self.password,
                database=self.database,
            )

            sqlalchemy_ssl_args: Dict[str, Any] = {}

            # Handle SSL params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if not os.path.isfile(ssl_setting):
                    logger.warning(
                        f"Database SSL setting `{key}` is not a file. "
                    )
                # Fix: `str.lstrip("ssl_")` strips a *character set*, not a
                # prefix, and only worked here by accident; slice off the
                # literal "ssl_" prefix instead.
                sqlalchemy_ssl_args[key[len("ssl_") :]] = ssl_setting
            if len(sqlalchemy_ssl_args) > 0:
                sqlalchemy_ssl_args[
                    "check_hostname"
                ] = self.ssl_verify_server_cert
                sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return str(sql_url), sqlalchemy_connect_args, engine_args

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/sql_zen_store.py
class Config:
    """Pydantic model configuration.

    Assignment validation is disabled on purpose: the SSL certificate
    attributes may later be replaced in place with the contents of the
    certificate files, which would not pass the original validators.
    """

    # Allow in-place replacement of attribute values without re-validation.
    validate_assignment = False
    # Reject any attribute that is not declared on the model.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Copy the store config using a different configuration path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Copy the store config using a different configuration path.

    Creates a copy of the store configuration that can be loaded using a
    different configuration path or in the context of a new environment,
    such as a container image. The configuration files accompanying the
    store configuration (e.g. certificates) are carried along into the
    copy.

    Args:
        config: The store configuration to copy.
        config_path: New path where the configuration copy will be loaded
            from.
        load_config_path: Absolute path that will be used to load the
            copied configuration. This can differ from `config_path` when
            the copy is loaded from a different environment (e.g. copied
            into a container image and loaded via another absolute path);
            the paths and URLs encoded in the copy reflect it.

    Returns:
        A new store configuration object that reflects the new
        configuration path.
    """
    assert isinstance(config, SqlZenStoreConfiguration)
    copied = config.copy()

    if copied.driver == SQLDatabaseDriver.SQLITE:
        # Re-anchor the sqlite database URL at the new location.
        root = str(load_config_path) if load_config_path else config_path
        copied.url = cls.get_local_url(root)
    elif copied.driver == SQLDatabaseDriver.MYSQL:
        # Inline the certificate file contents so the copy is portable.
        copied.expand_certificates()

    return copied
expand_certificates(self)

Expands the SSL certificate attributes (ssl_key, ssl_ca, ssl_cert) by replacing file paths with the certificate file contents.

Source code in zenml/zen_stores/sql_zen_store.py
def expand_certificates(self) -> None:
    """Load SSL certificate file contents into the config attributes.

    For each of `ssl_key`, `ssl_ca` and `ssl_cert`: if the attribute
    holds a path to an existing file, the attribute is replaced in place
    with that file's contents.
    """
    for attribute in ("ssl_key", "ssl_ca", "ssl_cert"):
        candidate = getattr(self, attribute, None)
        # Skip unset values and values that are not paths to real files
        # (they may already hold the expanded certificate contents).
        if not candidate or not os.path.isfile(candidate):
            continue
        with open(candidate, "r") as cert_file:
            setattr(self, attribute, cert_file.read())
get_local_url(path) staticmethod

Get a local SQL url for a given local path.

Parameters:

Name Type Description Default
path str

The path to the local sqlite file.

required

Returns:

Type Description
str

The local SQL url for the given path.

Source code in zenml/zen_stores/sql_zen_store.py
@staticmethod
def get_local_url(path: str) -> str:
    """Build the local SQL URL for a given local path.

    Args:
        path: Directory that contains (or will contain) the local sqlite
            database file.

    Returns:
        The `sqlite:///...` URL pointing at the ZenML database file
        inside `path`.
    """
    return "sqlite:///" + path + "/" + ZENML_SQLITE_DB_FILENAME
get_sqlmodel_config(self)

Get the SQLModel engine configuration for the SQL ZenML store.

Returns:

Type Description
Tuple[str, Dict[str, Any], Dict[str, Any]]

The URL and connection arguments for the SQLModel engine.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_sqlmodel_config(
    self,
) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
    """Get the SQLModel engine configuration for the SQL ZenML store.

    Returns:
        The URL, connection arguments and engine arguments for the
        SQLModel engine.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    sql_url = make_url(self.url)
    sqlalchemy_connect_args: Dict[str, Any] = {}
    engine_args: Dict[str, Any] = {}
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        assert self.database is not None
        # The following default value is needed for sqlite to avoid the
        # Error:
        #   sqlite3.ProgrammingError: SQLite objects created in a thread can
        #   only be used in that same thread.
        sqlalchemy_connect_args = {"check_same_thread": False}
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        engine_args = {
            "pool_size": self.pool_size,
            "max_overflow": self.max_overflow,
            "pool_pre_ping": self.pool_pre_ping,
        }

        sql_url = sql_url._replace(
            drivername="mysql+pymysql",
            username=self.username,
            password=self.password,
            database=self.database,
        )

        sqlalchemy_ssl_args: Dict[str, Any] = {}

        # Handle SSL params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if not os.path.isfile(ssl_setting):
                logger.warning(
                    f"Database SSL setting `{key}` is not a file. "
                )
            # FIX: was `key.lstrip("ssl_")`, which strips a *character
            # set* (any of "s", "l", "_"), not the literal prefix
            # (flake8-bugbear B005). It happened to produce the right
            # result for these three keys, but slicing off the prefix is
            # the correct, non-fragile operation.
            sqlalchemy_ssl_args[key[len("ssl_"):]] = ssl_setting
        if sqlalchemy_ssl_args:
            # Only verify the server hostname if explicitly configured.
            sqlalchemy_ssl_args["check_hostname"] = (
                self.ssl_verify_server_cert
            )
            sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return str(sql_url), sqlalchemy_connect_args, engine_args
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    # Parse the URL once and compare its driver against the known set.
    drivername = make_url(url).drivername
    return drivername in SQLDatabaseDriver.values()
validate_secrets_store(secrets_store) classmethod

Ensures that the secrets store is initialized with a default SQL secrets store.

Parameters:

Name Type Description Default
secrets_store Optional[zenml.config.secrets_store_config.SecretsStoreConfiguration]

The secrets store config to be validated.

required

Returns:

Type Description
SecretsStoreConfiguration

The validated secrets store config.

Source code in zenml/zen_stores/sql_zen_store.py
@validator("secrets_store")
def validate_secrets_store(
    cls, secrets_store: Optional[SecretsStoreConfiguration]
) -> SecretsStoreConfiguration:
    """Ensures that the secrets store is initialized with a default SQL secrets store.

    Args:
        secrets_store: The secrets store config to be validated.

    Returns:
        The validated secrets store config.
    """
    # Fall back to the default SQL secrets store when none was given.
    if secrets_store is None:
        return SqlSecretsStoreConfiguration()
    return secrets_store

zen_store_interface

ZenML Store interface.

ZenStoreInterface (ABC)

ZenML store interface.

All ZenML stores must implement the methods in this interface.

The methods in this interface are organized in the following way:

  • they are grouped into categories based on the type of resource that they operate on (e.g. stacks, stack components, etc.)

  • each category has a set of CRUD methods (create, read, update, delete) that operate on the resources in that category. The order of the methods in each category should be:

  • create methods - store a new resource. These methods should fill in generated fields (e.g. UUIDs, creation timestamps) in the resource and return the updated resource.

  • get methods - retrieve a single existing resource identified by a unique key or identifier from the store. These methods should always return a resource and raise an exception if the resource does not exist.
  • list methods - retrieve a list of resources from the store. These methods should accept a set of filter parameters that can be used to filter the list of resources retrieved from the store.
  • update methods - update an existing resource in the store. These methods should expect the updated resource to be correctly identified by its unique key or identifier and raise an exception if the resource does not exist.
  • delete methods - delete an existing resource from the store. These methods should expect the resource to be correctly identified by its unique key or identifier. If the resource does not exist, an exception should be raised.

Best practices for implementing and keeping this interface clean and easy to maintain and extend:

  • keep methods organized by resource type and ordered by CRUD operation
  • for resources with multiple keys, don't implement multiple get or list methods here if the same functionality can be achieved by a single get or list method. Instead, implement them in the BaseZenStore class and have them call the generic get or list method in this interface.
  • keep the logic required to convert between ZenML domain Model classes and internal store representations outside the ZenML domain Model classes
  • methods for resources that have two or more unique keys (e.g. a Workspace is uniquely identified by its name as well as its UUID) should reflect that in the method variants and/or method arguments:
    • methods that take in a resource identifier as argument should accept all variants of the identifier (e.g. workspace_name_or_uuid for methods that get/list/update/delete Workspaces)
    • if a compound key is involved, separate get methods should be implemented (e.g. get_pipeline to get a pipeline by ID and get_pipeline_in_workspace to get a pipeline by its name and the ID of the workspace it belongs to)
  • methods for resources that are scoped as children of other resources (e.g. a Stack is always owned by a Workspace) should reflect the key(s) of the parent resource in the provided methods and method arguments:
    • create methods should take the parent resource UUID(s) as an argument (e.g. create_stack takes in the workspace ID)
    • get methods should be provided to retrieve a resource by the compound key that includes the parent resource key(s)
    • list methods should feature optional filter arguments that reflect the parent resource key(s)
Source code in zenml/zen_stores/zen_store_interface.py
class ZenStoreInterface(ABC):
    """ZenML store interface.

    All ZenML stores must implement the methods in this interface.

    The methods in this interface are organized in the following way:

     * they are grouped into categories based on the type of resource
       that they operate on (e.g. stacks, stack components, etc.)

     * each category has a set of CRUD methods (create, read, update, delete)
       that operate on the resources in that category. The order of the methods
       in each category should be:

       * create methods - store a new resource. These methods
         should fill in generated fields (e.g. UUIDs, creation timestamps) in
         the resource and return the updated resource.
       * get methods - retrieve a single existing resource identified by a
         unique key or identifier from the store. These methods should always
         return a resource and raise an exception if the resource does not
         exist.
       * list methods - retrieve a list of resources from the store. These
         methods should accept a set of filter parameters that can be used to
         filter the list of resources retrieved from the store.
       * update methods - update an existing resource in the store. These
         methods should expect the updated resource to be correctly identified
         by its unique key or identifier and raise an exception if the resource
         does not exist.
       * delete methods - delete an existing resource from the store. These
         methods should expect the resource to be correctly identified by its
         unique key or identifier. If the resource does not exist,
         an exception should be raised.

    Best practices for implementing and keeping this interface clean and easy to
    maintain and extend:

      * keep methods organized by resource type and ordered by CRUD operation
      * for resources with multiple keys, don't implement multiple get or list
      methods here if the same functionality can be achieved by a single get or
      list method. Instead, implement them in the BaseZenStore class and have
      them call the generic get or list method in this interface.
      * keep the logic required to convert between ZenML domain Model classes
      and internal store representations outside the ZenML domain Model classes
      * methods for resources that have two or more unique keys (e.g. a Workspace
      is uniquely identified by its name as well as its UUID) should reflect
      that in the method variants and/or method arguments:
        * methods that take in a resource identifier as argument should accept
        all variants of the identifier (e.g. `workspace_name_or_uuid` for methods
        that get/list/update/delete Workspaces)
        * if a compound key is involved, separate get methods should be
        implemented (e.g. `get_pipeline` to get a pipeline by ID and
        `get_pipeline_in_workspace` to get a pipeline by its name and the ID of
        the workspace it belongs to)
      * methods for resources that are scoped as children of other resources
      (e.g. a Stack is always owned by a Workspace) should reflect the
      key(s) of the parent resource in the provided methods and method
      arguments:
        * create methods should take the parent resource UUID(s) as an argument
        (e.g. `create_stack` takes in the workspace ID)
        * get methods should be provided to retrieve a resource by the compound
        key that includes the parent resource key(s)
        * list methods should feature optional filter arguments that reflect
        the parent resource key(s)
    """

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    @abstractmethod
    def _initialize(self) -> None:
        """Initialize the store.

        This method is called immediately after the store is created. It should
        be used to set up the backend (database, connection etc.).
        """

    @abstractmethod
    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.
        """

    # ------
    # Stacks
    # ------

    @abstractmethod
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Create a new stack.

        Args:
            stack: The stack to create.

        Returns:
            The created stack.

        Raises:
            StackExistsError: If a stack with the same name is already owned
                by this user in this workspace.
        """

    @abstractmethod
    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Get a stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    @abstractmethod
    def list_stacks(
        self, stack_filter_model: StackFilterModel
    ) -> Page[StackResponseModel]:
        """List all stacks matching the given filter criteria.

        Args:
            stack_filter_model: All filter parameters including pagination
                params

        Returns:
            A list of all stacks matching the filter criteria.
        """

    @abstractmethod
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Update a stack.

        Args:
            stack_id: The ID of the stack update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    @abstractmethod
    def delete_stack(self, stack_id: UUID) -> None:
        """Delete a stack.

        Args:
            stack_id: The ID of the stack to delete.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    # ----------------
    # Stack components
    # ----------------

    @abstractmethod
    def create_stack_component(
        self, component: ComponentRequestModel
    ) -> ComponentResponseModel:
        """Create a stack component.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.

        Raises:
            StackComponentExistsError: If a stack component with the same name
                and type is already owned by this user in this workspace.
        """

    @abstractmethod
    def list_stack_components(
        self, component_filter_model: ComponentFilterModel
    ) -> Page[ComponentResponseModel]:
        """List all stack components matching the given filter criteria.

        Args:
            component_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all stack components matching the filter criteria.
        """

    @abstractmethod
    def get_stack_component(
        self, component_id: UUID
    ) -> ComponentResponseModel:
        """Get a stack component by ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """

    @abstractmethod
    def update_stack_component(
        self,
        component_id: UUID,
        component_update: ComponentUpdateModel,
    ) -> ComponentResponseModel:
        """Update an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """

    @abstractmethod
    def delete_stack_component(self, component_id: UUID) -> None:
        """Delete a stack component.

        Args:
            component_id: The ID of the stack component to delete.

        Raises:
            KeyError: if the stack component doesn't exist.
            ValueError: if the stack component is part of one or more stacks.
        """

    # -----------------------
    # Stack component flavors
    # -----------------------

    @abstractmethod
    def create_flavor(
        self,
        flavor: FlavorRequestModel,
    ) -> FlavorResponseModel:
        """Creates a new stack component flavor.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.

        Raises:
            EntityExistsError: If a flavor with the same name and type
                is already owned by this user in this workspace.
        """

    @abstractmethod
    def update_flavor(
        self, flavor_id: UUID, flavor_update: FlavorUpdateModel
    ) -> FlavorResponseModel:
        """Updates an existing user.

        Args:
            flavor_id: The id of the flavor to update.
            flavor_update: The update to be applied to the flavor.

        Returns:
            The updated flavor.
        """

    @abstractmethod
    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Get a stack component flavor by ID.

        Args:
            flavor_id: The ID of the flavor to get.

        Returns:
            The stack component flavor.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """

    @abstractmethod
    def list_flavors(
        self, flavor_filter_model: FlavorFilterModel
    ) -> Page[FlavorResponseModel]:
        """List all stack component flavors matching the given filter criteria.

        Args:
            flavor_filter_model: All filter parameters including pagination
                params.

        Returns:
            List of all the stack component flavors matching the given criteria.
        """

    @abstractmethod
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Delete a stack component flavor.

        Args:
            flavor_id: The ID of the stack component flavor to delete.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """

    # -----
    # Users
    # -----

    @abstractmethod
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Creates a new user.

        Args:
            user: User to be created.

        Returns:
            The newly created user.

        Raises:
            EntityExistsError: If a user with the given name already exists.
        """

    @abstractmethod
    def get_user(
        self,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        include_private: bool = False,
    ) -> UserResponseModel:
        """Gets a specific user, when no id is specified the active user is returned.

        Args:
            user_name_or_id: The name or ID of the user to get.
            include_private: Whether to include private user information

        Returns:
            The requested user, if it was found.

        Raises:
            KeyError: If no user with the given name or ID exists.
        """

    @abstractmethod
    def get_auth_user(
        self, user_name_or_id: Union[str, UUID]
    ) -> UserAuthModel:
        """Gets the auth model to a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.
        """

    @abstractmethod
    def list_users(
        self, user_filter_model: UserFilterModel
    ) -> Page[UserResponseModel]:
        """List all users.

        Args:
            user_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all users.
        """

    @abstractmethod
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Updates an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The updated user.

        Raises:
            KeyError: If no user with the given name exists.
        """

    @abstractmethod
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Deletes a user.

        Args:
            user_name_or_id: The name or ID of the user to delete.

        Raises:
            KeyError: If no user with the given ID exists.
        """

    # -----
    # Teams
    # -----

    @abstractmethod
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Creates a new team.

        Args:
            team: The team model to create.

        Returns:
            The newly created team.
        """

    @abstractmethod
    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Gets a specific team.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.

        Raises:
            KeyError: If no team with the given name or ID exists.
        """

    @abstractmethod
    def list_teams(
        self, team_filter_model: TeamFilterModel
    ) -> Page[TeamResponseModel]:
        """List all teams matching the given filter criteria.

        Args:
            team_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all teams matching the filter criteria.
        """

    @abstractmethod
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Update an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The updated team.

        Raises:
            KeyError: if the team does not exist.
        """

    @abstractmethod
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Deletes a team.

        Args:
            team_name_or_id: Name or ID of the team to delete.

        Raises:
            KeyError: If no team with the given ID exists.
        """

    # -----
    # Roles
    # -----

    @abstractmethod
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Creates a new role.

        Args:
            role: The role model to create.

        Returns:
            The newly created role.

        Raises:
            EntityExistsError: If a role with the given name already exists.
        """

    @abstractmethod
    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Gets a specific role.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.

        Raises:
            KeyError: If no role with the given name exists.
        """

    @abstractmethod
    def list_roles(
        self, role_filter_model: RoleFilterModel
    ) -> Page[RoleResponseModel]:
        """List all roles matching the given filter criteria.

        Args:
            role_filter_model: All filter parameters including pagination
                params.

        Returns:
            A list of all roles matching the filter criteria.
        """

    @abstractmethod
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Update an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The updated role.

        Raises:
            KeyError: if the role does not exist.
        """

    @abstractmethod
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Deletes a role.

        Args:
            role_name_or_id: Name or ID of the role to delete.

        Raises:
            KeyError: If no role with the given ID exists.
        """

    # ---------------------
    # User Role assignments
    # ---------------------
    @abstractmethod
    def create_user_role_assignment(
        self, user_role_assignment: UserRoleAssignmentRequestModel
    ) -> UserRoleAssignmentResponseModel:
        """Creates a new role assignment.

        Args:
            user_role_assignment: The role assignment model to create.

        Returns:
            The newly created role assignment.
        """

    @abstractmethod
    def get_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> UserRoleAssignmentResponseModel:
        """Gets a specific role assignment.

        Args:
            user_role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.

        Raises:
            KeyError: If no role assignment with the given ID exists.
        """

    @abstractmethod
    def delete_user_role_assignment(
        self, user_role_assignment_id: UUID
    ) -> None:
        """Delete a specific role assignment.

        Args:
            user_role_assignment_id: The ID of the specific role assignment
        """

    @abstractmethod
    def list_user_role_assignments(
        self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
    ) -> Page[UserRoleAssignmentResponseModel]:
        """List all roles assignments matching the given filter criteria.

        Args:
            user_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """

    # ---------------------
    # Team Role assignments
    # ---------------------
    @abstractmethod
    def create_team_role_assignment(
        self, team_role_assignment: TeamRoleAssignmentRequestModel
    ) -> TeamRoleAssignmentResponseModel:
        """Creates a new team role assignment.

        Args:
            team_role_assignment: The role assignment model to create.

        Returns:
            The newly created role assignment.
        """

    @abstractmethod
    def get_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> TeamRoleAssignmentResponseModel:
        """Gets a specific role assignment.

        Args:
            team_role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.

        Raises:
            KeyError: If no role assignment with the given ID exists.
        """

    @abstractmethod
    def delete_team_role_assignment(
        self, team_role_assignment_id: UUID
    ) -> None:
        """Delete a specific role assignment.

        Args:
            team_role_assignment_id: The ID of the specific role assignment
        """

    @abstractmethod
    def list_team_role_assignments(
        self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
    ) -> Page[TeamRoleAssignmentResponseModel]:
        """List all roles assignments matching the given filter criteria.

        Args:
            team_role_assignment_filter_model: All filter parameters including
                pagination params.

        Returns:
            A list of all roles assignments matching the filter criteria.
        """

    # --------
    # Workspaces
    # --------

    @abstractmethod
    def create_workspace(
        self, workspace: WorkspaceRequestModel
    ) -> WorkspaceResponseModel:
        """Creates a new workspace.

        Args:
            workspace: The workspace to create.

        Returns:
            The newly created workspace.

        Raises:
            EntityExistsError: If a workspace with the given name already
                exists.
        """

    @abstractmethod
    def get_workspace(
        self, workspace_name_or_id: Union[UUID, str]
    ) -> WorkspaceResponseModel:
        """Get an existing workspace by name or ID.

        Args:
            workspace_name_or_id: Name or ID of the workspace to get.

        Returns:
            The requested workspace.

        Raises:
            KeyError: If no workspace with the given name or ID exists.
        """

    @abstractmethod
    def list_workspaces(
        self, workspace_filter_model: WorkspaceFilterModel
    ) -> Page[WorkspaceResponseModel]:
        """List all workspaces matching the given filter criteria.

        Args:
            workspace_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all workspaces matching the filter criteria.
        """

    @abstractmethod
    def update_workspace(
        self, workspace_id: UUID, workspace_update: WorkspaceUpdateModel
    ) -> WorkspaceResponseModel:
        """Update an existing workspace.

        Args:
            workspace_id: The ID of the workspace to be updated.
            workspace_update: The update to be applied to the workspace.

        Returns:
            The updated workspace.

        Raises:
            KeyError: If the workspace does not exist.
        """

    @abstractmethod
    def delete_workspace(self, workspace_name_or_id: Union[str, UUID]) -> None:
        """Deletes a workspace.

        Args:
            workspace_name_or_id: Name or ID of the workspace to delete.

        Raises:
            KeyError: If no workspace with the given name or ID exists.
        """

    # ---------
    # Pipelines
    # ---------

    @abstractmethod
    def create_pipeline(
        self,
        pipeline: PipelineRequestModel,
    ) -> PipelineResponseModel:
        """Creates a new pipeline in a workspace.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The newly created pipeline.

        Raises:
            KeyError: If the workspace does not exist.
            EntityExistsError: If an identical pipeline already exists.
        """

    @abstractmethod
    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Get a pipeline with a given ID.

        Args:
            pipeline_id: ID of the pipeline to get.

        Returns:
            The pipeline.

        Raises:
            KeyError: If the pipeline does not exist.
        """

    @abstractmethod
    def list_pipelines(
        self, pipeline_filter_model: PipelineFilterModel
    ) -> Page[PipelineResponseModel]:
        """List all pipelines matching the given filter criteria.

        Args:
            pipeline_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all pipelines matching the filter criteria.
        """

    @abstractmethod
    def update_pipeline(
        self,
        pipeline_id: UUID,
        pipeline_update: PipelineUpdateModel,
    ) -> PipelineResponseModel:
        """Updates a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The updated pipeline.

        Raises:
            KeyError: If the pipeline doesn't exist.
        """

    @abstractmethod
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Deletes a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to delete.

        Raises:
            KeyError: If the pipeline doesn't exist.
        """

    # ---------
    # Builds
    # ---------

    @abstractmethod
    def create_build(
        self,
        build: PipelineBuildRequestModel,
    ) -> PipelineBuildResponseModel:
        """Creates a new pipeline build in a workspace.

        Args:
            build: The build to create.

        Returns:
            The newly created build.

        Raises:
            KeyError: If the workspace does not exist.
            EntityExistsError: If an identical build already exists.
        """

    @abstractmethod
    def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
        """Get a pipeline build with a given ID.

        Args:
            build_id: ID of the build to get.

        Returns:
            The build.

        Raises:
            KeyError: If the build does not exist.
        """

    @abstractmethod
    def list_builds(
        self, build_filter_model: PipelineBuildFilterModel
    ) -> Page[PipelineBuildResponseModel]:
        """List all pipeline builds matching the given filter criteria.

        Args:
            build_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all builds matching the filter criteria.
        """

    @abstractmethod
    def delete_build(self, build_id: UUID) -> None:
        """Deletes a build.

        Args:
            build_id: The ID of the build to delete.

        Raises:
            KeyError: If the build doesn't exist.
        """

    # ----------------------
    # Pipeline Deployments
    # ----------------------

    @abstractmethod
    def create_deployment(
        self,
        deployment: PipelineDeploymentRequestModel,
    ) -> PipelineDeploymentResponseModel:
        """Creates a new pipeline deployment in a workspace.

        Args:
            deployment: The deployment to create.

        Returns:
            The newly created deployment.

        Raises:
            KeyError: If the workspace does not exist.
            EntityExistsError: If an identical deployment already exists.
        """

    @abstractmethod
    def get_deployment(
        self, deployment_id: UUID
    ) -> PipelineDeploymentResponseModel:
        """Get a pipeline deployment with a given ID.

        Args:
            deployment_id: ID of the deployment to get.

        Returns:
            The deployment.

        Raises:
            KeyError: If the deployment does not exist.
        """

    @abstractmethod
    def list_deployments(
        self, deployment_filter_model: PipelineDeploymentFilterModel
    ) -> Page[PipelineDeploymentResponseModel]:
        """List all pipeline deployments matching the given filter criteria.

        Args:
            deployment_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all deployments matching the filter criteria.
        """

    @abstractmethod
    def delete_deployment(self, deployment_id: UUID) -> None:
        """Deletes a pipeline deployment.

        Args:
            deployment_id: The ID of the deployment to delete.

        Raises:
            KeyError: If the deployment doesn't exist.
        """

    # ---------
    # Schedules
    # ---------

    @abstractmethod
    def create_schedule(
        self, schedule: ScheduleRequestModel
    ) -> ScheduleResponseModel:
        """Creates a new schedule.

        Args:
            schedule: The schedule model to create.

        Returns:
            The newly created schedule.
        """

    @abstractmethod
    def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
        """Get a schedule with a given ID.

        Args:
            schedule_id: ID of the schedule to get.

        Returns:
            The schedule.

        Raises:
            KeyError: If the schedule does not exist.
        """

    @abstractmethod
    def list_schedules(
        self, schedule_filter_model: ScheduleFilterModel
    ) -> Page[ScheduleResponseModel]:
        """List all schedules matching the given filter criteria.

        Args:
            schedule_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of schedules matching the filter criteria.
        """

    @abstractmethod
    def update_schedule(
        self,
        schedule_id: UUID,
        schedule_update: ScheduleUpdateModel,
    ) -> ScheduleResponseModel:
        """Updates a schedule.

        Args:
            schedule_id: The ID of the schedule to be updated.
            schedule_update: The update to be applied.

        Returns:
            The updated schedule.

        Raises:
            KeyError: If the schedule doesn't exist.
        """

    @abstractmethod
    def delete_schedule(self, schedule_id: UUID) -> None:
        """Deletes a schedule.

        Args:
            schedule_id: The ID of the schedule to delete.

        Raises:
            KeyError: If the schedule doesn't exist.
        """

    # --------------
    # Pipeline runs
    # --------------

    @abstractmethod
    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Creates a pipeline run.

        Args:
            pipeline_run: The pipeline run model to create.

        Returns:
            The created pipeline run.

        Raises:
            EntityExistsError: If an identical pipeline run already exists.
            KeyError: If the pipeline does not exist.
        """

    @abstractmethod
    def get_run(
        self, run_name_or_id: Union[str, UUID]
    ) -> PipelineRunResponseModel:
        """Gets a pipeline run.

        Args:
            run_name_or_id: The name or ID of the pipeline run to get.

        Returns:
            The pipeline run.

        Raises:
            KeyError: If the pipeline run doesn't exist.
        """

    @abstractmethod
    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> Tuple[PipelineRunResponseModel, bool]:
        """Gets or creates a pipeline run.

        If a run with the same ID or name already exists, it is returned.
        Otherwise, a new run is created.

        Args:
            pipeline_run: The pipeline run to get or create.

        Returns:
            A tuple of the pipeline run and a boolean indicating whether the
            run was newly created (as opposed to fetched).
        """

    @abstractmethod
    def list_runs(
        self, runs_filter_model: PipelineRunFilterModel
    ) -> Page[PipelineRunResponseModel]:
        """List all pipeline runs matching the given filter criteria.

        Args:
            runs_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all pipeline runs matching the filter criteria.
        """

    @abstractmethod
    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Updates a pipeline run.

        Args:
            run_id: The ID of the pipeline run to update.
            run_update: The update to be applied to the pipeline run.

        Returns:
            The updated pipeline run.

        Raises:
            KeyError: If the pipeline run doesn't exist.
        """

    @abstractmethod
    def delete_run(self, run_id: UUID) -> None:
        """Deletes a pipeline run.

        Args:
            run_id: The ID of the pipeline run to delete.

        Raises:
            KeyError: If the pipeline run doesn't exist.
        """

    # ------------------
    # Pipeline run steps
    # ------------------

    @abstractmethod
    def create_run_step(
        self, step_run: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Creates a step run.

        Args:
            step_run: The step run to create.

        Returns:
            The created step run.

        Raises:
            EntityExistsError: If the step run already exists.
            KeyError: If the pipeline run doesn't exist.
        """

    @abstractmethod
    def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
        """Get a step run by ID.

        Args:
            step_run_id: The ID of the step run to get.

        Returns:
            The step run.

        Raises:
            KeyError: If the step run doesn't exist.
        """

    @abstractmethod
    def list_run_steps(
        self, step_run_filter_model: StepRunFilterModel
    ) -> Page[StepRunResponseModel]:
        """List all step runs matching the given filter criteria.

        Args:
            step_run_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all step runs matching the filter criteria.
        """

    @abstractmethod
    def update_run_step(
        self,
        step_run_id: UUID,
        step_run_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Updates a step run.

        Args:
            step_run_id: The ID of the step run to update.
            step_run_update: The update to be applied to the step run.

        Returns:
            The updated step run.

        Raises:
            KeyError: If the step run doesn't exist.
        """

    # ---------
    # Artifacts
    # ---------

    @abstractmethod
    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Creates an artifact.

        Args:
            artifact: The artifact model to create.

        Returns:
            The created artifact.
        """

    @abstractmethod
    def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
        """Gets an artifact.

        Args:
            artifact_id: The ID of the artifact to get.

        Returns:
            The artifact.

        Raises:
            KeyError: If the artifact doesn't exist.
        """

    @abstractmethod
    def list_artifacts(
        self, artifact_filter_model: ArtifactFilterModel
    ) -> Page[ArtifactResponseModel]:
        """List all artifacts matching the given filter criteria.

        Args:
            artifact_filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all artifacts matching the filter criteria.
        """

    @abstractmethod
    def delete_artifact(self, artifact_id: UUID) -> None:
        """Deletes an artifact.

        Args:
            artifact_id: The ID of the artifact to delete.

        Raises:
            KeyError: If the artifact doesn't exist.
        """

    # ------------
    # Run Metadata
    # ------------

    @abstractmethod
    def create_run_metadata(
        self, run_metadata: RunMetadataRequestModel
    ) -> RunMetadataResponseModel:
        """Creates run metadata.

        Args:
            run_metadata: The run metadata model to create.

        Returns:
            The created run metadata.
        """

    @abstractmethod
    def list_run_metadata(
        self,
        run_metadata_filter_model: RunMetadataFilterModel,
    ) -> Page[RunMetadataResponseModel]:
        """List run metadata matching the given filter criteria.

        Args:
            run_metadata_filter_model: All filter parameters including
                pagination params.

        Returns:
            A page of run metadata matching the filter criteria.
        """

    # -----------------
    # Code Repositories
    # -----------------

    @abstractmethod
    def create_code_repository(
        self, code_repository: CodeRepositoryRequestModel
    ) -> CodeRepositoryResponseModel:
        """Creates a new code repository.

        Args:
            code_repository: The code repository model to be created.

        Returns:
            The newly created code repository.

        Raises:
            EntityExistsError: If a code repository with the given name already
                exists.
        """

    @abstractmethod
    def get_code_repository(
        self, code_repository_id: UUID
    ) -> CodeRepositoryResponseModel:
        """Gets a specific code repository.

        Args:
            code_repository_id: The ID of the code repository to get.

        Returns:
            The requested code repository.

        Raises:
            KeyError: If no code repository with the given ID exists.
        """

    @abstractmethod
    def list_code_repositories(
        self, filter_model: CodeRepositoryFilterModel
    ) -> Page[CodeRepositoryResponseModel]:
        """List all code repositories matching the given filter criteria.

        Args:
            filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all code repositories matching the filter criteria.
        """

    @abstractmethod
    def update_code_repository(
        self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
    ) -> CodeRepositoryResponseModel:
        """Updates an existing code repository.

        Args:
            code_repository_id: The ID of the code repository to update.
            update: The update to be applied to the code repository.

        Returns:
            The updated code repository.

        Raises:
            KeyError: If no code repository with the given ID exists.
        """

    @abstractmethod
    def delete_code_repository(self, code_repository_id: UUID) -> None:
        """Deletes a specific code repository.

        Args:
            code_repository_id: The ID of the code repository to delete.

        Raises:
            KeyError: If no code repository with the given ID exists.
        """

    # ------------------
    # Service Connectors
    # ------------------

    @abstractmethod
    def create_service_connector(
        self,
        service_connector: ServiceConnectorRequestModel,
    ) -> ServiceConnectorResponseModel:
        """Creates a new service connector.

        Args:
            service_connector: The service connector model to be created.

        Returns:
            The newly created service connector.

        Raises:
            EntityExistsError: If a service connector with the given name
                is already owned by this user in this workspace.
        """

    @abstractmethod
    def get_service_connector(
        self, service_connector_id: UUID
    ) -> ServiceConnectorResponseModel:
        """Gets a specific service connector.

        Args:
            service_connector_id: The ID of the service connector to get.

        Returns:
            The requested service connector.

        Raises:
            KeyError: If no service connector with the given ID exists.
        """

    @abstractmethod
    def list_service_connectors(
        self, filter_model: ServiceConnectorFilterModel
    ) -> Page[ServiceConnectorResponseModel]:
        """List all service connectors matching the given filter criteria.

        Args:
            filter_model: All filter parameters including pagination
                params.

        Returns:
            A page of all service connectors matching the filter criteria.
        """

    @abstractmethod
    def update_service_connector(
        self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
    ) -> ServiceConnectorResponseModel:
        """Updates an existing service connector.

        The update model contains the fields to be updated. If a field value is
        set to None in the model, the field is not updated, but there are
        special rules concerning some fields:

        * the `configuration` and `secrets` fields together represent a full
        valid configuration update, not just a partial update. If either is
        set (i.e. not None) in the update, their values are merged together and
        will replace the existing configuration and secrets values.
        * the `resource_id` field value is also a full replacement value: if set
        to `None`, the resource ID is removed from the service connector.
        * the `expiration_seconds` field value is also a full replacement value:
        if set to `None`, the expiration is removed from the service connector.
        * the `secret_id` field value in the update is ignored, given that
        secrets are managed internally by the ZenML store.
        * the `labels` field is also a full labels update: if set (i.e. not
        `None`), all existing labels are removed and replaced by the new labels
        in the update.

        Args:
            service_connector_id: The ID of the service connector to update.
            update: The update to be applied to the service connector.

        Returns:
            The updated service connector.

        Raises:
            KeyError: If no service connector with the given ID exists.
        """

    @abstractmethod
    def delete_service_connector(self, service_connector_id: UUID) -> None:
        """Deletes a specific service connector.

        Args:
            service_connector_id: The ID of the service connector to delete.

        Raises:
            KeyError: If no service connector with the given ID exists.
        """

    @abstractmethod
    def verify_service_connector_config(
        self,
        service_connector: ServiceConnectorRequestModel,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Verifies if a service connector configuration has access to resources.

        Args:
            service_connector: The service connector configuration to verify.
            list_resources: If True, the list of all resources accessible
                through the service connector is returned.

        Returns:
            The list of resources that the service connector configuration has
            access to.

        Raises:
            NotImplementedError: If the service connector cannot be verified
                on the store e.g. due to missing package dependencies.
        """

    @abstractmethod
    def verify_service_connector(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
        list_resources: bool = True,
    ) -> ServiceConnectorResourcesModel:
        """Verifies if a service connector instance has access to one or more resources.

        Args:
            service_connector_id: The ID of the service connector to verify.
            resource_type: The type of resource to verify access to.
            resource_id: The ID of the resource to verify access to.
            list_resources: If True, the list of all resources accessible
                through the service connector and matching the supplied resource
                type and ID are returned.

        Returns:
            The list of resources that the service connector has access to,
            scoped to the supplied resource type and ID, if provided.

        Raises:
            KeyError: If no service connector with the given ID exists.
            NotImplementedError: If the service connector cannot be verified
                e.g. due to missing package dependencies.
        """

    @abstractmethod
    def get_service_connector_client(
        self,
        service_connector_id: UUID,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> ServiceConnectorResponseModel:
        """Get a service connector client for a service connector and given resource.

        Args:
            service_connector_id: The ID of the base service connector to use.
            resource_type: The type of resource to get a client for.
            resource_id: The ID of the resource to get a client for.

        Returns:
            A service connector client that can be used to access the given
            resource.

        Raises:
            KeyError: If no service connector with the given ID exists.
            NotImplementedError: If the service connector cannot be instantiated
                on the store e.g. due to missing package dependencies.
        """

    @abstractmethod
    def list_service_connector_resources(
        self,
        user_name_or_id: Union[str, UUID],
        workspace_name_or_id: Union[str, UUID],
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        resource_id: Optional[str] = None,
    ) -> List[ServiceConnectorResourcesModel]:
        """List resources that can be accessed by service connectors.

        Args:
            user_name_or_id: The name or ID of the user to scope to.
            workspace_name_or_id: The name or ID of the workspace to scope to.
            connector_type: The type of service connector to scope to.
            resource_type: The type of resource to scope to.
            resource_id: The ID of the resource to scope to.

        Returns:
            The list of resources that the available service connectors have
            access to, scoped by the supplied filters.
        """

    @abstractmethod
    def list_service_connector_types(
        self,
        connector_type: Optional[str] = None,
        resource_type: Optional[str] = None,
        auth_method: Optional[str] = None,
    ) -> List[ServiceConnectorTypeModel]:
        """Get a list of service connector types.

        Args:
            connector_type: Filter by connector type.
            resource_type: Filter by resource type.
            auth_method: Filter by authentication method.

        Returns:
            A list of matching service connector types.
        """

    @abstractmethod
    def get_service_connector_type(
        self,
        connector_type: str,
    ) -> ServiceConnectorTypeModel:
        """Returns the requested service connector type.

        Args:
            connector_type: The service connector type identifier.

        Returns:
            The requested service connector type.

        Raises:
            KeyError: If no service connector type with the given identifier
                exists.
        """
create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.
    """
create_build(self, build)

Creates a new build in a workspace.

Parameters:

Name Type Description Default
build PipelineBuildRequestModel

The build to create.

required

Returns:

Type Description
PipelineBuildResponseModel

The newly created build.

Exceptions:

Type Description
KeyError

If the workspace does not exist.

EntityExistsError

If an identical build already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_build(
    self,
    build: PipelineBuildRequestModel,
) -> PipelineBuildResponseModel:
    """Creates a new build in a workspace.

    Args:
        build: The build to create.

    Returns:
        The newly created build.

    Raises:
        KeyError: If the workspace does not exist.
        EntityExistsError: If an identical build already exists.
    """
create_code_repository(self, code_repository)

Creates a new code repository.

Parameters:

Name Type Description Default
code_repository CodeRepositoryRequestModel

Code repository to be created.

required

Returns:

Type Description
CodeRepositoryResponseModel

The newly created code repository.

Exceptions:

Type Description
EntityExistsError

If a code repository with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_code_repository(
    self, code_repository: CodeRepositoryRequestModel
) -> CodeRepositoryResponseModel:
    """Creates a new code repository.

    Args:
        code_repository: Code repository to be created.

    Returns:
        The newly created code repository.

    Raises:
        EntityExistsError: If a code repository with the given name already
            exists.
    """
create_deployment(self, deployment)

Creates a new deployment in a workspace.

Parameters:

Name Type Description Default
deployment PipelineDeploymentRequestModel

The deployment to create.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The newly created deployment.

Exceptions:

Type Description
KeyError

If the workspace does not exist.

EntityExistsError

If an identical deployment already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_deployment(
    self,
    deployment: PipelineDeploymentRequestModel,
) -> PipelineDeploymentResponseModel:
    """Creates a new deployment in a workspace.

    Args:
        deployment: The deployment to create.

    Returns:
        The newly created deployment.

    Raises:
        KeyError: If the workspace does not exist.
        EntityExistsError: If an identical deployment already exists.
    """
create_flavor(self, flavor)

Creates a new stack component flavor.

Parameters:

Name Type Description Default
flavor FlavorRequestModel

The stack component flavor to create.

required

Returns:

Type Description
FlavorResponseModel

The newly created flavor.

Exceptions:

Type Description
EntityExistsError

If a flavor with the same name and type is already owned by this user in this workspace.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_flavor(
    self,
    flavor: FlavorRequestModel,
) -> FlavorResponseModel:
    """Creates a new stack component flavor.

    Args:
        flavor: The stack component flavor to create.

    Returns:
        The newly created flavor.

    Raises:
        EntityExistsError: If a flavor with the same name and type
            is already owned by this user in this workspace.
    """
create_pipeline(self, pipeline)

Creates a new pipeline in a workspace.

Parameters:

Name Type Description Default
pipeline PipelineRequestModel

The pipeline to create.

required

Returns:

Type Description
PipelineResponseModel

The newly created pipeline.

Exceptions:

Type Description
KeyError

if the workspace does not exist.

EntityExistsError

If an identical pipeline already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_pipeline(
    self,
    pipeline: PipelineRequestModel,
) -> PipelineResponseModel:
    """Creates a new pipeline in a workspace.

    Args:
        pipeline: The pipeline to create.

    Returns:
        The newly created pipeline.

    Raises:
        KeyError: if the workspace does not exist.
        EntityExistsError: If an identical pipeline already exists.
    """
create_role(self, role)

Creates a new role.

Parameters:

Name Type Description Default
role RoleRequestModel

The role model to create.

required

Returns:

Type Description
RoleResponseModel

The newly created role.

Exceptions:

Type Description
EntityExistsError

If a role with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
    """Creates a new role.

    Args:
        role: The role model to create.

    Returns:
        The newly created role.

    Raises:
        EntityExistsError: If a role with the given name already exists.
    """
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Exceptions:

Type Description
EntityExistsError

If an identical pipeline run already exists.

KeyError

If the pipeline does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.

    Raises:
        EntityExistsError: If an identical pipeline run already exists.
        KeyError: If the pipeline does not exist.
    """
create_run_metadata(self, run_metadata)

Creates run metadata.

Parameters:

Name Type Description Default
run_metadata RunMetadataRequestModel

The run metadata to create.

required

Returns:

Type Description
RunMetadataResponseModel

The created run metadata.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_run_metadata(
    self, run_metadata: RunMetadataRequestModel
) -> RunMetadataResponseModel:
    """Creates run metadata.

    Args:
        run_metadata: The run metadata to create.

    Returns:
        The created run metadata.
    """
create_run_step(self, step_run)

Creates a step run.

Parameters:

Name Type Description Default
step_run StepRunRequestModel

The step run to create.

required

Returns:

Type Description
StepRunResponseModel

The created step run.

Exceptions:

Type Description
EntityExistsError

if the step run already exists.

KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_run_step(
    self, step_run: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step run.

    Args:
        step_run: The step run to create.

    Returns:
        The created step run.

    Raises:
        EntityExistsError: if the step run already exists.
        KeyError: if the pipeline run doesn't exist.
    """
create_schedule(self, schedule)

Creates a new schedule.

Parameters:

Name Type Description Default
schedule ScheduleRequestModel

The schedule to create.

required

Returns:

Type Description
ScheduleResponseModel

The newly created schedule.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_schedule(
    self, schedule: ScheduleRequestModel
) -> ScheduleResponseModel:
    """Creates a new schedule.

    Args:
        schedule: The schedule to create.

    Returns:
        The newly created schedule.
    """
create_service_connector(self, service_connector)

Creates a new service connector.

Parameters:

Name Type Description Default
service_connector ServiceConnectorRequestModel

Service connector to be created.

required

Returns:

Type Description
ServiceConnectorResponseModel

The newly created service connector.

Exceptions:

Type Description
EntityExistsError

If a service connector with the given name is already owned by this user in this workspace.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_service_connector(
    self,
    service_connector: ServiceConnectorRequestModel,
) -> ServiceConnectorResponseModel:
    """Creates a new service connector.

    Args:
        service_connector: Service connector to be created.

    Returns:
        The newly created service connector.

    Raises:
        EntityExistsError: If a service connector with the given name
            is already owned by this user in this workspace.
    """
create_stack(self, stack)

Create a new stack.

Parameters:

Name Type Description Default
stack StackRequestModel

The stack to create.

required

Returns:

Type Description
StackResponseModel

The created stack.

Exceptions:

Type Description
StackExistsError

If a stack with the same name is already owned by this user in this workspace.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
    """Create a new stack.

    Args:
        stack: The stack to create.

    Returns:
        The created stack.

    Raises:
        StackExistsError: If a stack with the same name is already owned
            by this user in this workspace.
    """
create_stack_component(self, component)

Create a stack component.

Parameters:

Name Type Description Default
component ComponentRequestModel

The stack component to create.

required

Returns:

Type Description
ComponentResponseModel

The created stack component.

Exceptions:

Type Description
StackComponentExistsError

If a stack component with the same name and type is already owned by this user in this workspace.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_stack_component(
    self, component: ComponentRequestModel
) -> ComponentResponseModel:
    """Create a stack component.

    Args:
        component: The stack component to create.

    Returns:
        The created stack component.

    Raises:
        StackComponentExistsError: If a stack component with the same name
            and type is already owned by this user in this workspace.
    """
create_team(self, team)

Creates a new team.

Parameters:

Name Type Description Default
team TeamRequestModel

The team model to create.

required

Returns:

Type Description
TeamResponseModel

The newly created team.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
    """Creates a new team.

    Args:
        team: The team model to create.

    Returns:
        The newly created team.
    """
create_team_role_assignment(self, team_role_assignment)

Creates a new team role assignment.

Parameters:

Name Type Description Default
team_role_assignment TeamRoleAssignmentRequestModel

The role assignment model to create.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_team_role_assignment(
    self, team_role_assignment: TeamRoleAssignmentRequestModel
) -> TeamRoleAssignmentResponseModel:
    """Creates a new team role assignment.

    Args:
        team_role_assignment: The role assignment model to create.

    Returns:
        The newly created role assignment.
    """
create_user(self, user)

Creates a new user.

Parameters:

Name Type Description Default
user UserRequestModel

User to be created.

required

Returns:

Type Description
UserResponseModel

The newly created user.

Exceptions:

Type Description
EntityExistsError

If a user with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_user(self, user: UserRequestModel) -> UserResponseModel:
    """Creates a new user.

    Args:
        user: User to be created.

    Returns:
        The newly created user.

    Raises:
        EntityExistsError: If a user with the given name already exists.
    """
create_user_role_assignment(self, user_role_assignment)

Creates a new role assignment.

Parameters:

Name Type Description Default
user_role_assignment UserRoleAssignmentRequestModel

The role assignment model to create.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_user_role_assignment(
    self, user_role_assignment: UserRoleAssignmentRequestModel
) -> UserRoleAssignmentResponseModel:
    """Creates a new role assignment.

    Args:
        user_role_assignment: The role assignment model to create.

    Returns:
        The newly created role assignment.
    """
create_workspace(self, workspace)

Creates a new workspace.

Parameters:

Name Type Description Default
workspace WorkspaceRequestModel

The workspace to create.

required

Returns:

Type Description
WorkspaceResponseModel

The newly created workspace.

Exceptions:

Type Description
EntityExistsError

If a workspace with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_workspace(
    self, workspace: WorkspaceRequestModel
) -> WorkspaceResponseModel:
    """Creates a new workspace.

    Args:
        workspace: The workspace to create.

    Returns:
        The newly created workspace.

    Raises:
        EntityExistsError: If a workspace with the given name already exists.
    """
delete_artifact(self, artifact_id)

Deletes an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to delete.

required

Exceptions:

Type Description
KeyError

if the artifact doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_artifact(self, artifact_id: UUID) -> None:
    """Deletes an artifact.

    Args:
        artifact_id: The ID of the artifact to delete.

    Raises:
        KeyError: if the artifact doesn't exist.
    """
delete_build(self, build_id)

Deletes a build.

Parameters:

Name Type Description Default
build_id UUID

The ID of the build to delete.

required

Exceptions:

Type Description
KeyError

if the build doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_build(self, build_id: UUID) -> None:
    """Deletes a build.

    Args:
        build_id: The ID of the build to delete.

    Raises:
        KeyError: if the build doesn't exist.
    """
delete_code_repository(self, code_repository_id)

Deletes a code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to delete.

required

Exceptions:

Type Description
KeyError

If no code repository with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_code_repository(self, code_repository_id: UUID) -> None:
    """Deletes a code repository.

    Args:
        code_repository_id: The ID of the code repository to delete.

    Raises:
        KeyError: If no code repository with the given ID exists.
    """
delete_deployment(self, deployment_id)

Deletes a deployment.

Parameters:

Name Type Description Default
deployment_id UUID

The ID of the deployment to delete.

required

Exceptions:

Type Description
KeyError

If the deployment doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_deployment(self, deployment_id: UUID) -> None:
    """Deletes a deployment.

    Args:
        deployment_id: The ID of the deployment to delete.

    Raises:
        KeyError: If the deployment doesn't exist.
    """
delete_flavor(self, flavor_id)

Delete a stack component flavor.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the stack component flavor to delete.

required

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_flavor(self, flavor_id: UUID) -> None:
    """Delete a stack component flavor.

    Args:
        flavor_id: The ID of the stack component flavor to delete.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
delete_pipeline(self, pipeline_id)

Deletes a pipeline.

Parameters:

Name Type Description Default
pipeline_id UUID

The ID of the pipeline to delete.

required

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_pipeline(self, pipeline_id: UUID) -> None:
    """Deletes a pipeline.

    Args:
        pipeline_id: The ID of the pipeline to delete.

    Raises:
        KeyError: if the pipeline doesn't exist.
    """
delete_role(self, role_name_or_id)

Deletes a role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to delete.

required

Exceptions:

Type Description
KeyError

If no role with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
    """Deletes a role.

    Args:
        role_name_or_id: Name or ID of the role to delete.

    Raises:
        KeyError: If no role with the given ID exists.
    """
delete_run(self, run_id)

Deletes a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to delete.

required

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_run(self, run_id: UUID) -> None:
    """Deletes a pipeline run.

    Args:
        run_id: The ID of the pipeline run to delete.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
delete_schedule(self, schedule_id)

Deletes a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to delete.

required

Exceptions:

Type Description
KeyError

if the schedule doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_schedule(self, schedule_id: UUID) -> None:
    """Deletes a schedule.

    Args:
        schedule_id: The ID of the schedule to delete.

    Raises:
        KeyError: if the schedule doesn't exist.
    """
delete_service_connector(self, service_connector_id)

Deletes a service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to delete.

required

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_service_connector(self, service_connector_id: UUID) -> None:
    """Deletes a service connector.

    Args:
        service_connector_id: The ID of the service connector to delete.

    Raises:
        KeyError: If no service connector with the given ID exists.
    """
delete_stack(self, stack_id)

Delete a stack.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to delete.

required

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_stack(self, stack_id: UUID) -> None:
    """Delete a stack.

    Args:
        stack_id: The ID of the stack to delete.

    Raises:
        KeyError: if the stack doesn't exist.
    """
delete_stack_component(self, component_id)

Delete a stack component.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to delete.

required

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

ValueError

if the stack component is part of one or more stacks.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_stack_component(self, component_id: UUID) -> None:
    """Delete a stack component.

    Args:
        component_id: The ID of the stack component to delete.

    Raises:
        KeyError: if the stack component doesn't exist.
        ValueError: if the stack component is part of one or more stacks.
    """
delete_team(self, team_name_or_id)

Deletes a team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to delete.

required

Exceptions:

Type Description
KeyError

If no team with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
    """Deletes a team.

    Args:
        team_name_or_id: Name or ID of the team to delete.

    Raises:
        KeyError: If no team with the given ID exists.
    """
delete_team_role_assignment(self, team_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

The ID of the role assignment to delete.

required
Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> None:
    """Delete a specific role assignment.

    Args:
        team_role_assignment_id: The ID of the role assignment to delete.
    """
delete_user(self, user_name_or_id)

Deletes a user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to delete.

required

Exceptions:

Type Description
KeyError

If no user with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
    """Deletes a user.

    Args:
        user_name_or_id: The name or ID of the user to delete.

    Raises:
        KeyError: If no user with the given ID exists.
    """
delete_user_role_assignment(self, user_role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

The ID of the role assignment to delete.

required
Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> None:
    """Delete a specific role assignment.

    Args:
        user_role_assignment_id: The ID of the role assignment to delete.
    """
delete_workspace(self, workspace_name_or_id)

Deletes a workspace.

Parameters:

Name Type Description Default
workspace_name_or_id Union[str, uuid.UUID]

Name or ID of the workspace to delete.

required

Exceptions:

Type Description
KeyError

If no workspace with the given name exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_workspace(self, workspace_name_or_id: Union[str, UUID]) -> None:
    """Deletes a workspace.

    Args:
        workspace_name_or_id: Name or ID of the workspace to delete.

    Raises:
        KeyError: If no workspace with the given name exists.
    """
get_artifact(self, artifact_id)

Gets an artifact.

Parameters:

Name Type Description Default
artifact_id UUID

The ID of the artifact to get.

required

Returns:

Type Description
ArtifactResponseModel

The artifact.

Exceptions:

Type Description
KeyError

if the artifact doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_artifact(self, artifact_id: UUID) -> ArtifactResponseModel:
    """Gets an artifact.

    Args:
        artifact_id: The ID of the artifact to get.

    Returns:
        The artifact.

    Raises:
        KeyError: if the artifact doesn't exist.
    """
get_auth_user(self, user_name_or_id)

Gets the auth model to a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserAuthModel

The requested user, if it was found.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_auth_user(
    self, user_name_or_id: Union[str, UUID]
) -> UserAuthModel:
    """Gets the auth model to a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
get_build(self, build_id)

Get a build with a given ID.

Parameters:

Name Type Description Default
build_id UUID

ID of the build.

required

Returns:

Type Description
PipelineBuildResponseModel

The build.

Exceptions:

Type Description
KeyError

If the build does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_build(self, build_id: UUID) -> PipelineBuildResponseModel:
    """Get a build with a given ID.

    Args:
        build_id: ID of the build.

    Returns:
        The build.

    Raises:
        KeyError: If the build does not exist.
    """
get_code_repository(self, code_repository_id)

Gets a specific code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to get.

required

Returns:

Type Description
CodeRepositoryResponseModel

The requested code repository, if it was found.

Exceptions:

Type Description
KeyError

If no code repository with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_code_repository(
    self, code_repository_id: UUID
) -> CodeRepositoryResponseModel:
    """Gets a specific code repository.

    Args:
        code_repository_id: The ID of the code repository to get.

    Returns:
        The requested code repository, if it was found.

    Raises:
        KeyError: If no code repository with the given ID exists.
    """
get_deployment(self, deployment_id)

Get a deployment with a given ID.

Parameters:

Name Type Description Default
deployment_id UUID

ID of the deployment.

required

Returns:

Type Description
PipelineDeploymentResponseModel

The deployment.

Exceptions:

Type Description
KeyError

If the deployment does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_deployment(
    self, deployment_id: UUID
) -> PipelineDeploymentResponseModel:
    """Get a deployment with a given ID.

    Args:
        deployment_id: ID of the deployment.

    Returns:
        The deployment.

    Raises:
        KeyError: If the deployment does not exist.
    """
get_flavor(self, flavor_id)

Get a stack component flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the flavor to get.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Get a stack component flavor by ID.

    Args:
        flavor_id: The ID of the flavor to get.

    Returns:
        The stack component flavor.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
Tuple[zenml.models.pipeline_run_models.PipelineRunResponseModel, bool]

The pipeline run, and a boolean indicating whether the run was created or not.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> Tuple[PipelineRunResponseModel, bool]:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run, and a boolean indicating whether the run was
        created or not.
    """
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Exceptions:

Type Description
KeyError

if the pipeline does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Get a pipeline with a given ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.

    Raises:
        KeyError: if the pipeline does not exist.
    """
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Exceptions:

Type Description
KeyError

If no role with the given name exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Gets a specific role.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.

    Raises:
        KeyError: If no role with the given name exists.
    """
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[str, uuid.UUID]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_run(
    self, run_name_or_id: Union[str, UUID]
) -> PipelineRunResponseModel:
    """Gets a pipeline run.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
get_run_step(self, step_run_id)

Get a step run by ID.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step run to get.

required

Returns:

Type Description
StepRunResponseModel

The step run.

Exceptions:

Type Description
KeyError

if the step run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_run_step(self, step_run_id: UUID) -> StepRunResponseModel:
    """Get a step run by ID.

    Args:
        step_run_id: The ID of the step run to get.

    Returns:
        The step run.

    Raises:
        KeyError: if the step run doesn't exist.
    """
get_schedule(self, schedule_id)

Get a schedule with a given ID.

Parameters:

Name Type Description Default
schedule_id UUID

ID of the schedule.

required

Returns:

Type Description
ScheduleResponseModel

The schedule.

Exceptions:

Type Description
KeyError

if the schedule does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_schedule(self, schedule_id: UUID) -> ScheduleResponseModel:
    """Get a schedule with a given ID.

    Args:
        schedule_id: ID of the schedule.

    Returns:
        The schedule.

    Raises:
        KeyError: if the schedule does not exist.
    """
get_service_connector(self, service_connector_id)

Gets a specific service connector.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to get.

required

Returns:

Type Description
ServiceConnectorResponseModel

The requested service connector, if it was found.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_service_connector(
    self, service_connector_id: UUID
) -> ServiceConnectorResponseModel:
    """Gets a specific service connector.

    Args:
        service_connector_id: The ID of the service connector to get.

    Returns:
        The requested service connector, if it was found.

    Raises:
        KeyError: If no service connector with the given ID exists.
    """
get_service_connector_client(self, service_connector_id, resource_type=None, resource_id=None)

Get a service connector client for a service connector and given resource.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the base service connector to use.

required
resource_type Optional[str]

The type of resource to get a client for.

None
resource_id Optional[str]

The ID of the resource to get a client for.

None

Returns:

Type Description
ServiceConnectorResponseModel

A service connector client that can be used to access the given resource.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

NotImplementedError

If the service connector cannot be instantiated on the store e.g. due to missing package dependencies.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_service_connector_client(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> ServiceConnectorResponseModel:
    """Get a service connector client for a service connector and given resource.

    Args:
        service_connector_id: The ID of the base service connector to use.
        resource_type: The type of resource to get a client for.
        resource_id: The ID of the resource to get a client for.

    Returns:
        A service connector client that can be used to access the given
        resource.

    Raises:
        KeyError: If no service connector with the given ID exists.
        NotImplementedError: If the service connector cannot be instantiated
            on the store e.g. due to missing package dependencies.
    """
get_service_connector_type(self, connector_type)

Returns the requested service connector type.

Parameters:

Name Type Description Default
connector_type str

the service connector type identifier.

required

Returns:

Type Description
ServiceConnectorTypeModel

The requested service connector type.

Exceptions:

Type Description
KeyError

If no service connector type with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_service_connector_type(
    self,
    connector_type: str,
) -> ServiceConnectorTypeModel:
    """Returns the requested service connector type.

    Args:
        connector_type: the service connector type identifier.

    Returns:
        The requested service connector type.

    Raises:
        KeyError: If no service connector type with the given ID exists.
    """
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Get a stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.

    Raises:
        KeyError: if the stack doesn't exist.
    """
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_stack_component(
    self, component_id: UUID
) -> ComponentResponseModel:
    """Get a stack component by ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.
    """
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Exceptions:

Type Description
KeyError

If no team with the given name or ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Gets a specific team.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.

    Raises:
        KeyError: If no team with the given name or ID exists.
    """
get_team_role_assignment(self, team_role_assignment_id)

Gets a specific role assignment.

Parameters:

Name Type Description Default
team_role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
TeamRoleAssignmentResponseModel

The requested role assignment.

Exceptions:

Type Description
KeyError

If no role assignment with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_team_role_assignment(
    self, team_role_assignment_id: UUID
) -> TeamRoleAssignmentResponseModel:
    """Gets a specific role assignment.

    Args:
        team_role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.

    Raises:
        KeyError: If no role assignment with the given ID exists.
    """
get_user(self, user_name_or_id=None, include_private=False)

Gets a specific user. If no ID is specified, the active user is returned.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

None
include_private bool

Whether to include private user information

False

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Exceptions:

Type Description
KeyError

If no user with the given name or ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_user(
    self,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    include_private: bool = False,
) -> UserResponseModel:
    """Gets a specific user, when no id is specified the active user is returned.

    Args:
        user_name_or_id: The name or ID of the user to get.
        include_private: Whether to include private user information

    Returns:
        The requested user, if it was found.

    Raises:
        KeyError: If no user with the given name or ID exists.
    """
get_user_role_assignment(self, user_role_assignment_id)

Gets a specific role assignment.

Parameters:

Name Type Description Default
user_role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
UserRoleAssignmentResponseModel

The requested role assignment.

Exceptions:

Type Description
KeyError

If no role assignment with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_user_role_assignment(
    self, user_role_assignment_id: UUID
) -> UserRoleAssignmentResponseModel:
    """Gets a specific role assignment.

    Args:
        user_role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.

    Raises:
        KeyError: If no role assignment with the given ID exists.
    """
get_workspace(self, workspace_name_or_id)

Get an existing workspace by name or ID.

Parameters:

Name Type Description Default
workspace_name_or_id Union[uuid.UUID, str]

Name or ID of the workspace to get.

required

Returns:

Type Description
WorkspaceResponseModel

The requested workspace.

Exceptions:

Type Description
KeyError

If there is no such workspace.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_workspace(
    self, workspace_name_or_id: Union[UUID, str]
) -> WorkspaceResponseModel:
    """Get an existing workspace by name or ID.

    Args:
        workspace_name_or_id: Name or ID of the workspace to get.

    Returns:
        The requested workspace.

    Raises:
        KeyError: If there is no such workspace.
    """
list_artifacts(self, artifact_filter_model)

List all artifacts matching the given filter criteria.

Parameters:

Name Type Description Default
artifact_filter_model ArtifactFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ArtifactResponseModel]

A list of all artifacts matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_artifacts(
    self, artifact_filter_model: ArtifactFilterModel
) -> Page[ArtifactResponseModel]:
    """List all artifacts matching the given filter criteria.

    Args:
        artifact_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all artifacts matching the filter criteria.
    """
list_builds(self, build_filter_model)

List all builds matching the given filter criteria.

Parameters:

Name Type Description Default
build_filter_model PipelineBuildFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineBuildResponseModel]

A page of all builds matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_builds(
    self, build_filter_model: PipelineBuildFilterModel
) -> Page[PipelineBuildResponseModel]:
    """List all builds matching the given filter criteria.

    Args:
        build_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all builds matching the filter criteria.
    """
list_code_repositories(self, filter_model)

List all code repositories.

Parameters:

Name Type Description Default
filter_model CodeRepositoryFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[CodeRepositoryResponseModel]

A page of all code repositories.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_code_repositories(
    self, filter_model: CodeRepositoryFilterModel
) -> Page[CodeRepositoryResponseModel]:
    """List all code repositories.

    Args:
        filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all code repositories.
    """
list_deployments(self, deployment_filter_model)

List all deployments matching the given filter criteria.

Parameters:

Name Type Description Default
deployment_filter_model PipelineDeploymentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineDeploymentResponseModel]

A page of all deployments matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_deployments(
    self, deployment_filter_model: PipelineDeploymentFilterModel
) -> Page[PipelineDeploymentResponseModel]:
    """List all deployments matching the given filter criteria.

    Args:
        deployment_filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all deployments matching the filter criteria.
    """
list_flavors(self, flavor_filter_model)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
flavor_filter_model FlavorFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[FlavorResponseModel]

List of all the stack component flavors matching the given criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_flavors(
    self, flavor_filter_model: FlavorFilterModel
) -> Page[FlavorResponseModel]:
    """List all stack component flavors matching the given filter criteria.

    Args:
        flavor_filter_model: All filter parameters including pagination
            params.

    Returns:
        List of all the stack component flavors matching the given criteria.
    """
list_pipelines(self, pipeline_filter_model)

List all pipelines matching the given filter criteria.

Parameters:

Name Type Description Default
pipeline_filter_model PipelineFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineResponseModel]

A list of all pipelines matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_pipelines(
    self, pipeline_filter_model: PipelineFilterModel
) -> Page[PipelineResponseModel]:
    """List all pipelines matching the given filter criteria.

    Args:
        pipeline_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all pipelines matching the filter criteria.
    """
list_roles(self, role_filter_model)

List all roles matching the given filter criteria.

Parameters:

Name Type Description Default
role_filter_model RoleFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RoleResponseModel]

A list of all roles matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_roles(
    self, role_filter_model: RoleFilterModel
) -> Page[RoleResponseModel]:
    """List all roles matching the given filter criteria.

    Args:
        role_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all roles matching the filter criteria.
    """
list_run_metadata(self, run_metadata_filter_model)

List run metadata.

Parameters:

Name Type Description Default
run_metadata_filter_model RunMetadataFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[RunMetadataResponseModel]

The run metadata.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_run_metadata(
    self,
    run_metadata_filter_model: RunMetadataFilterModel,
) -> Page[RunMetadataResponseModel]:
    """List run metadata.

    Args:
        run_metadata_filter_model: All filter parameters including
            pagination params.

    Returns:
        The run metadata.
    """
list_run_steps(self, step_run_filter_model)

List all step runs matching the given filter criteria.

Parameters:

Name Type Description Default
step_run_filter_model StepRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[StepRunResponseModel]

A list of all step runs matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_run_steps(
    self, step_run_filter_model: StepRunFilterModel
) -> Page[StepRunResponseModel]:
    """List all step runs matching the given filter criteria.

    Args:
        step_run_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all step runs matching the filter criteria.
    """
list_runs(self, runs_filter_model)

List all pipeline runs matching the given filter criteria.

Parameters:

Name Type Description Default
runs_filter_model PipelineRunFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[PipelineRunResponseModel]

A list of all pipeline runs matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_runs(
    self, runs_filter_model: PipelineRunFilterModel
) -> Page[PipelineRunResponseModel]:
    """List all pipeline runs matching the given filter criteria.

    Args:
        runs_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all pipeline runs matching the filter criteria.
    """
list_schedules(self, schedule_filter_model)

List all schedules in the workspace.

Parameters:

Name Type Description Default
schedule_filter_model ScheduleFilterModel

All filter parameters including pagination params

required

Returns:

Type Description
Page[ScheduleResponseModel]

A list of schedules.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_schedules(
    self, schedule_filter_model: ScheduleFilterModel
) -> Page[ScheduleResponseModel]:
    """List all schedules in the workspace.

    Args:
        schedule_filter_model: All filter parameters including pagination
            params

    Returns:
        A list of schedules.
    """
list_service_connector_resources(self, user_name_or_id, workspace_name_or_id, connector_type=None, resource_type=None, resource_id=None)

List resources that can be accessed by service connectors.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to scope to.

required
workspace_name_or_id Union[str, uuid.UUID]

The name or ID of the workspace to scope to.

required
connector_type Optional[str]

The type of service connector to scope to.

None
resource_type Optional[str]

The type of resource to scope to.

None
resource_id Optional[str]

The ID of the resource to scope to.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorResourcesModel]

The matching list of resources that available service connectors have access to.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_service_connector_resources(
    self,
    user_name_or_id: Union[str, UUID],
    workspace_name_or_id: Union[str, UUID],
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
) -> List[ServiceConnectorResourcesModel]:
    """List resources that can be accessed by service connectors.

    Args:
        user_name_or_id: The name or ID of the user to scope to.
        workspace_name_or_id: The name or ID of the workspace to scope to.
        connector_type: The type of service connector to scope to.
        resource_type: The type of resource to scope to.
        resource_id: The ID of the resource to scope to.

    Returns:
        The matching list of resources that available service
        connectors have access to.
    """
list_service_connector_types(self, connector_type=None, resource_type=None, auth_method=None)

Get a list of service connector types.

Parameters:

Name Type Description Default
connector_type Optional[str]

Filter by connector type.

None
resource_type Optional[str]

Filter by resource type.

None
auth_method Optional[str]

Filter by authentication method.

None

Returns:

Type Description
List[zenml.models.service_connector_models.ServiceConnectorTypeModel]

List of service connector types.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_service_connector_types(
    self,
    connector_type: Optional[str] = None,
    resource_type: Optional[str] = None,
    auth_method: Optional[str] = None,
) -> List[ServiceConnectorTypeModel]:
    """Get a list of service connector types.

    Args:
        connector_type: Filter by connector type.
        resource_type: Filter by resource type.
        auth_method: Filter by authentication method.

    Returns:
        List of service connector types.
    """
list_service_connectors(self, filter_model)

List all service connectors.

Parameters:

Name Type Description Default
filter_model ServiceConnectorFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ServiceConnectorResponseModel]

A page of all service connectors.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_service_connectors(
    self, filter_model: ServiceConnectorFilterModel
) -> Page[ServiceConnectorResponseModel]:
    """List all service connectors.

    Args:
        filter_model: All filter parameters including pagination
            params.

    Returns:
        A page of all service connectors.
    """
list_stack_components(self, component_filter_model)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
component_filter_model ComponentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[ComponentResponseModel]

A list of all stack components matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_stack_components(
    self, component_filter_model: ComponentFilterModel
) -> Page[ComponentResponseModel]:
    """List all stack components matching the given filter criteria.

    Args:
        component_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all stack components matching the filter criteria.
    """
list_stacks(self, stack_filter_model)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
stack_filter_model StackFilterModel

All filter parameters including pagination params

required

Returns:

Type Description
Page[StackResponseModel]

A list of all stacks matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_stacks(
    self, stack_filter_model: StackFilterModel
) -> Page[StackResponseModel]:
    """List all stacks matching the given filter criteria.

    Args:
        stack_filter_model: All filter parameters including pagination
            params

    Returns:
        A list of all stacks matching the filter criteria.
    """
list_team_role_assignments(self, team_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
team_role_assignment_filter_model TeamRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_team_role_assignments(
    self, team_role_assignment_filter_model: TeamRoleAssignmentFilterModel
) -> Page[TeamRoleAssignmentResponseModel]:
    """List all roles assignments matching the given filter criteria.

    Args:
        team_role_assignment_filter_model: All filter parameters including
            pagination params.

    Returns:
        A list of all role assignments matching the filter criteria.
    """
list_teams(self, team_filter_model)

List all teams matching the given filter criteria.

Parameters:

Name Type Description Default
team_filter_model TeamFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[TeamResponseModel]

A list of all teams matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_teams(
    self, team_filter_model: TeamFilterModel
) -> Page[TeamResponseModel]:
    """List all teams matching the given filter criteria.

    Args:
        team_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all teams matching the filter criteria.
    """
list_user_role_assignments(self, user_role_assignment_filter_model)

List all role assignments matching the given filter criteria.

Parameters:

Name Type Description Default
user_role_assignment_filter_model UserRoleAssignmentFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserRoleAssignmentResponseModel]

A list of all role assignments matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_user_role_assignments(
    self, user_role_assignment_filter_model: UserRoleAssignmentFilterModel
) -> Page[UserRoleAssignmentResponseModel]:
    """List all roles assignments matching the given filter criteria.

    Args:
        user_role_assignment_filter_model: All filter parameters including
            pagination params.

    Returns:
        A list of all role assignments matching the filter criteria.
    """
list_users(self, user_filter_model)

List all users.

Parameters:

Name Type Description Default
user_filter_model UserFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_users(
    self, user_filter_model: UserFilterModel
) -> Page[UserResponseModel]:
    """List all users.

    Args:
        user_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all users.
    """
list_workspaces(self, workspace_filter_model)

List all workspaces matching the given filter criteria.

Parameters:

Name Type Description Default
workspace_filter_model WorkspaceFilterModel

All filter parameters including pagination params.

required

Returns:

Type Description
Page[WorkspaceResponseModel]

A list of all workspaces matching the filter criteria.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_workspaces(
    self, workspace_filter_model: WorkspaceFilterModel
) -> Page[WorkspaceResponseModel]:
    """List all workspace matching the given filter criteria.

    Args:
        workspace_filter_model: All filter parameters including pagination
            params.

    Returns:
        A list of all workspaces matching the filter criteria.
    """
update_code_repository(self, code_repository_id, update)

Updates an existing code repository.

Parameters:

Name Type Description Default
code_repository_id UUID

The ID of the code repository to update.

required
update CodeRepositoryUpdateModel

The update to be applied to the code repository.

required

Returns:

Type Description
CodeRepositoryResponseModel

The updated code repository.

Exceptions:

Type Description
KeyError

If no code repository with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_code_repository(
    self, code_repository_id: UUID, update: CodeRepositoryUpdateModel
) -> CodeRepositoryResponseModel:
    """Updates an existing code repository.

    Args:
        code_repository_id: The ID of the code repository to update.
        update: The update to be applied to the code repository.

    Returns:
        The updated code repository.

    Raises:
        KeyError: If no code repository with the given ID exists.
    """
update_flavor(self, flavor_id, flavor_update)

Updates an existing flavor.

Parameters:

Name Type Description Default
flavor_id UUID

The id of the flavor to update.

required
flavor_update FlavorUpdateModel

The update to be applied to the flavor.

required

Returns:

Type Description
FlavorResponseModel

The updated flavor.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_flavor(
    self, flavor_id: UUID, flavor_update: FlavorUpdateModel
) -> FlavorResponseModel:
    """Updates an existing user.

    Args:
        flavor_id: The id of the flavor to update.
        flavor_update: The update to be applied to the flavor.

    Returns:
        The updated flavor.
    """
update_pipeline(self, pipeline_id, pipeline_update)

Updates a pipeline.

Parameters:

Name Type Description Default
pipeline_id UUID

The ID of the pipeline to be updated.

required
pipeline_update PipelineUpdateModel

The update to be applied.

required

Returns:

Type Description
PipelineResponseModel

The updated pipeline.

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_pipeline(
    self,
    pipeline_id: UUID,
    pipeline_update: PipelineUpdateModel,
) -> PipelineResponseModel:
    """Updates a pipeline.

    Args:
        pipeline_id: The ID of the pipeline to be updated.
        pipeline_update: The update to be applied.

    Returns:
        The updated pipeline.

    Raises:
        KeyError: if the pipeline doesn't exist.
    """
update_role(self, role_id, role_update)

Update an existing role.

Parameters:

Name Type Description Default
role_id UUID

The ID of the role to be updated.

required
role_update RoleUpdateModel

The update to be applied to the role.

required

Returns:

Type Description
RoleResponseModel

The updated role.

Exceptions:

Type Description
KeyError

if the role does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_role(
    self, role_id: UUID, role_update: RoleUpdateModel
) -> RoleResponseModel:
    """Update an existing role.

    Args:
        role_id: The ID of the role to be updated.
        role_update: The update to be applied to the role.

    Returns:
        The updated role.

    Raises:
        KeyError: if the role does not exist.
    """
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
update_run_step(self, step_run_id, step_run_update)

Updates a step run.

Parameters:

Name Type Description Default
step_run_id UUID

The ID of the step to update.

required
step_run_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step run.

Exceptions:

Type Description
KeyError

if the step run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_run_step(
    self,
    step_run_id: UUID,
    step_run_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Updates a step run.

    Args:
        step_run_id: The ID of the step to update.
        step_run_update: The update to be applied to the step.

    Returns:
        The updated step run.

    Raises:
        KeyError: if the step run doesn't exist.
    """
update_schedule(self, schedule_id, schedule_update)

Updates a schedule.

Parameters:

Name Type Description Default
schedule_id UUID

The ID of the schedule to be updated.

required
schedule_update ScheduleUpdateModel

The update to be applied.

required

Returns:

Type Description
ScheduleResponseModel

The updated schedule.

Exceptions:

Type Description
KeyError

if the schedule doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_schedule(
    self,
    schedule_id: UUID,
    schedule_update: ScheduleUpdateModel,
) -> ScheduleResponseModel:
    """Updates a schedule.

    Args:
        schedule_id: The ID of the schedule to be updated.
        schedule_update: The update to be applied.

    Returns:
        The updated schedule.

    Raises:
        KeyError: if the schedule doesn't exist.
    """
update_service_connector(self, service_connector_id, update)

Updates an existing service connector.

The update model contains the fields to be updated. If a field value is set to None in the model, the field is not updated, but there are special rules concerning some fields:

  • the configuration and secrets fields together represent a full valid configuration update, not just a partial update. If either is set (i.e. not None) in the update, their values are merged together and will replace the existing configuration and secrets values.
  • the resource_id field value is also a full replacement value: if set to None, the resource ID is removed from the service connector.
  • the expiration_seconds field value is also a full replacement value: if set to None, the expiration is removed from the service connector.
  • the secret_id field value in the update is ignored, given that secrets are managed internally by the ZenML store.
  • the labels field is also a full labels update: if set (i.e. not None), all existing labels are removed and replaced by the new labels in the update.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to update.

required
update ServiceConnectorUpdateModel

The update to be applied to the service connector.

required

Returns:

Type Description
ServiceConnectorResponseModel

The updated service connector.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_service_connector(
    self, service_connector_id: UUID, update: ServiceConnectorUpdateModel
) -> ServiceConnectorResponseModel:
    """Updates an existing service connector.

    The update model contains the fields to be updated. If a field value is
    set to None in the model, the field is not updated, but there are
    special rules concerning some fields:

    * the `configuration` and `secrets` fields together represent a full
    valid configuration update, not just a partial update. If either is
    set (i.e. not None) in the update, their values are merged together and
    will replace the existing configuration and secrets values.
    * the `resource_id` field value is also a full replacement value: if set
    to `None`, the resource ID is removed from the service connector.
    * the `expiration_seconds` field value is also a full replacement value:
    if set to `None`, the expiration is removed from the service connector.
    * the `secret_id` field value in the update is ignored, given that
    secrets are managed internally by the ZenML store.
    * the `labels` field is also a full labels update: if set (i.e. not
    `None`), all existing labels are removed and replaced by the new labels
    in the update.

    Args:
        service_connector_id: The ID of the service connector to update.
        update: The update to be applied to the service connector.

    Returns:
        The updated service connector.

    Raises:
        KeyError: If no service connector with the given ID exists.
    """
update_stack(self, stack_id, stack_update)

Update a stack.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to update.

required
stack_update StackUpdateModel

The update request on the stack.

required

Returns:

Type Description
StackResponseModel

The updated stack.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_stack(
    self, stack_id: UUID, stack_update: StackUpdateModel
) -> StackResponseModel:
    """Update a stack.

    Args:
        stack_id: The ID of the stack to update.
        stack_update: The update request on the stack.

    Returns:
        The updated stack.

    Raises:
        KeyError: if the stack doesn't exist.
    """
update_stack_component(self, component_id, component_update)

Update an existing stack component.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to update.

required
component_update ComponentUpdateModel

The update to be applied to the stack component.

required

Returns:

Type Description
ComponentResponseModel

The updated stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_stack_component(
    self,
    component_id: UUID,
    component_update: ComponentUpdateModel,
) -> ComponentResponseModel:
    """Update an existing stack component.

    Args:
        component_id: The ID of the stack component to update.
        component_update: The update to be applied to the stack component.

    Returns:
        The updated stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
update_team(self, team_id, team_update)

Update an existing team.

Parameters:

Name Type Description Default
team_id UUID

The ID of the team to be updated.

required
team_update TeamUpdateModel

The update to be applied to the team.

required

Returns:

Type Description
TeamResponseModel

The updated team.

Exceptions:

Type Description
KeyError

if the team does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_team(
    self, team_id: UUID, team_update: TeamUpdateModel
) -> TeamResponseModel:
    """Update an existing team.

    Args:
        team_id: The ID of the team to be updated.
        team_update: The update to be applied to the team.

    Returns:
        The updated team.

    Raises:
        KeyError: If the team does not exist.
    """
update_user(self, user_id, user_update)

Updates an existing user.

Parameters:

Name Type Description Default
user_id UUID

The id of the user to update.

required
user_update UserUpdateModel

The update to be applied to the user.

required

Returns:

Type Description
UserResponseModel

The updated user.

Exceptions:

Type Description
KeyError

If no user with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_user(
    self, user_id: UUID, user_update: UserUpdateModel
) -> UserResponseModel:
    """Updates an existing user.

    Args:
        user_id: The ID of the user to update.
        user_update: The update to be applied to the user.

    Returns:
        The updated user.

    Raises:
        KeyError: If no user with the given ID exists.
    """
update_workspace(self, workspace_id, workspace_update)

Update an existing workspace.

Parameters:

Name Type Description Default
workspace_id UUID

The ID of the workspace to be updated.

required
workspace_update WorkspaceUpdateModel

The update to be applied to the workspace.

required

Returns:

Type Description
WorkspaceResponseModel

The updated workspace.

Exceptions:

Type Description
KeyError

if the workspace does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_workspace(
    self, workspace_id: UUID, workspace_update: WorkspaceUpdateModel
) -> WorkspaceResponseModel:
    """Update an existing workspace.

    Args:
        workspace_id: The ID of the workspace to be updated.
        workspace_update: The update to be applied to the workspace.

    Returns:
        The updated workspace.

    Raises:
        KeyError: If the workspace does not exist.
    """
verify_service_connector(self, service_connector_id, resource_type=None, resource_id=None, list_resources=True)

Verifies if a service connector instance has access to one or more resources.

Parameters:

Name Type Description Default
service_connector_id UUID

The ID of the service connector to verify.

required
resource_type Optional[str]

The type of resource to verify access to.

None
resource_id Optional[str]

The ID of the resource to verify access to.

None
list_resources bool

If True, the list of all resources accessible through the service connector and matching the supplied resource type and ID are returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector has access to, scoped to the supplied resource type and ID, if provided.

Exceptions:

Type Description
KeyError

If no service connector with the given ID exists.

NotImplementedError

If the service connector cannot be verified e.g. due to missing package dependencies.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def verify_service_connector(
    self,
    service_connector_id: UUID,
    resource_type: Optional[str] = None,
    resource_id: Optional[str] = None,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector instance has access to one or more resources.

    Args:
        service_connector_id: The ID of the service connector to verify.
        resource_type: The type of resource to verify access to.
        resource_id: The ID of the resource to verify access to.
        list_resources: If True, the list of all resources accessible
            through the service connector and matching the supplied resource
            type and ID are returned.

    Returns:
        The list of resources that the service connector has access to,
        scoped to the supplied resource type and ID, if provided.

    Raises:
        KeyError: If no service connector with the given ID exists.
        NotImplementedError: If the service connector cannot be verified
            e.g. due to missing package dependencies.
    """
verify_service_connector_config(self, service_connector, list_resources=True)

Verifies if a service connector configuration has access to resources.

Parameters:

Name Type Description Default
service_connector ServiceConnectorRequestModel

The service connector configuration to verify.

required
list_resources bool

If True, the list of all resources accessible through the service connector is returned.

True

Returns:

Type Description
ServiceConnectorResourcesModel

The list of resources that the service connector configuration has access to.

Exceptions:

Type Description
NotImplementedError

If the service connector cannot be verified on the store e.g. due to missing package dependencies.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def verify_service_connector_config(
    self,
    service_connector: ServiceConnectorRequestModel,
    list_resources: bool = True,
) -> ServiceConnectorResourcesModel:
    """Verifies if a service connector configuration has access to resources.

    Args:
        service_connector: The service connector configuration to verify.
        list_resources: If True, the list of all resources accessible
            through the service connector is returned.

    Returns:
        The list of resources that the service connector configuration has
        access to.

    Raises:
        NotImplementedError: If the service connector cannot be verified
            on the store e.g. due to missing package dependencies.
    """