Skip to content

Zen Stores

zenml.zen_stores special

ZenStores define ways to store ZenML relevant data locally or remotely.

base_zen_store

Base Zen Store implementation.

BaseZenStore (BaseModel, ZenStoreInterface, AnalyticsTrackerMixin, ABC) pydantic-model

Base class for accessing and persisting ZenML core objects.

Attributes:

Name Type Description
config StoreConfiguration

The configuration of the store.

track_analytics bool

Only send analytics if set to True.

Source code in zenml/zen_stores/base_zen_store.py
class BaseZenStore(BaseModel, ZenStoreInterface, AnalyticsTrackerMixin, ABC):
    """Base class for accessing and persisting ZenML core objects.

    Attributes:
        config: The configuration of the store.
        track_analytics: Only send analytics if set to `True`.
    """

    config: StoreConfiguration
    track_analytics: bool = True
    _active_user: Optional[UserResponseModel] = None

    TYPE: ClassVar[StoreType]
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]]

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    def __init__(
        self,
        skip_default_registrations: bool = False,
        **kwargs: Any,
    ) -> None:
        """Create and initialize a store instance.

        Args:
            skip_default_registrations: When `True`, skip creating the
                default stack and user in the store.
            **kwargs: Extra keyword arguments forwarded to the Pydantic
                constructor.

        Raises:
            RuntimeError: If the store fails to initialize.
        """
        super().__init__(**kwargs)

        try:
            self._initialize()
        except Exception as e:
            raise RuntimeError(
                f"Error initializing {self.type.value} store with URL "
                f"'{self.url}': {str(e)}"
            ) from e

        if skip_default_registrations:
            logger.debug("Skipping database initialization")
        else:
            logger.debug("Initializing database")
            self._initialize_database()

    @staticmethod
    def get_store_class(store_type: StoreType) -> Type["BaseZenStore"]:
        """Returns the class of the given store type.

        The store implementations are imported lazily inside each branch so
        that only the dependencies of the requested store type are loaded.

        Args:
            store_type: The type of the store to get the class for.

        Returns:
            The store class corresponding to the given store type.

        Raises:
            TypeError: If the store type is unsupported.
        """
        if store_type == StoreType.SQL:
            from zenml.zen_stores.sql_zen_store import SqlZenStore

            return SqlZenStore
        elif store_type == StoreType.REST:
            from zenml.zen_stores.rest_zen_store import RestZenStore

            return RestZenStore
        else:
            raise TypeError(
                f"No store implementation found for store type "
                f"`{store_type.value}`."
            )

    @staticmethod
    def get_store_config_class(
        store_type: StoreType,
    ) -> Type["StoreConfiguration"]:
        """Look up the configuration class for a store type.

        Args:
            store_type: The store type whose config class is requested.

        Returns:
            The configuration class associated with the store type.
        """
        return BaseZenStore.get_store_class(store_type).CONFIG_TYPE

    @staticmethod
    def get_store_type(url: str) -> StoreType:
        """Determine the store type that can handle a given URL scheme.

        Args:
            url: The store URL.

        Returns:
            The store type whose configuration supports the URL scheme.

        Raises:
            TypeError: If no store type was found to support the supplied URL.
        """
        from zenml.zen_stores.rest_zen_store import RestZenStoreConfiguration
        from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

        # Guard-clause style: return as soon as a config class matches.
        if SqlZenStoreConfiguration.supports_url_scheme(url):
            return StoreType.SQL
        if RestZenStoreConfiguration.supports_url_scheme(url):
            return StoreType.REST
        raise TypeError(f"No store implementation found for URL: {url}.")

    @staticmethod
    def create_store(
        config: StoreConfiguration,
        skip_default_registrations: bool = False,
        **kwargs: Any,
    ) -> "BaseZenStore":
        """Instantiate and initialize a store from a store configuration.

        Args:
            config: The store configuration to use.
            skip_default_registrations: If `True`, the creation of the default
                stack and user in the store will be skipped.
            **kwargs: Additional keyword arguments to pass to the store class

        Returns:
            The initialized store.
        """
        logger.debug(f"Creating store with config '{config}'...")
        store_class = BaseZenStore.get_store_class(config.type)
        return store_class(
            config=config,
            skip_default_registrations=skip_default_registrations,
            **kwargs,
        )

    @staticmethod
    def get_default_store_config(path: str) -> StoreConfiguration:
        """Build the default store configuration.

        The default store is a SQLite store that keeps the DB contents on the
        local filesystem.

        Args:
            path: The local path where the store DB will be stored.

        Returns:
            The default store configuration.
        """
        from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

        return SqlZenStoreConfiguration(
            type=StoreType.SQL,
            url=SqlZenStoreConfiguration.get_local_url(path),
        )

    def _initialize_database(self) -> None:
        """Initialize the database on first use.

        Idempotently ensures that the default project, the admin and guest
        roles, the default user and the default stack exist, creating each
        one only if the corresponding lookup raises `KeyError`.
        """
        try:
            default_project = self._default_project
        except KeyError:
            default_project = self._create_default_project()
        # NOTE: plain attribute access (instead of the previous `assert`) is
        # used to probe for the roles: `assert` statements are stripped when
        # running under `python -O`, which would silently skip creating the
        # default roles. The properties raise KeyError when the role is
        # missing, which is what triggers creation here.
        try:
            _ = self._admin_role
        except KeyError:
            self._create_admin_role()
        try:
            _ = self._guest_role
        except KeyError:
            self._create_guest_role()
        try:
            default_user = self._default_user
        except KeyError:
            default_user = self._create_default_user()
        try:
            self._get_default_stack(
                project_name_or_id=default_project.id,
                user_name_or_id=default_user.id,
            )
        except KeyError:
            self._create_default_stack(
                project_name_or_id=default_project.id,
                user_name_or_id=default_user.id,
            )

    @property
    def url(self) -> str:
        """The URL this store is configured with.

        Returns:
            The URL of the store.
        """
        store_config = self.config
        return store_config.url

    @property
    def type(self) -> StoreType:
        """The store's type.

        Returns:
            The store type declared by the concrete store class.
        """
        # TYPE is a ClassVar set by each concrete subclass.
        return type(self).TYPE

    def validate_active_config(
        self,
        active_project_name_or_id: Optional[Union[str, UUID]] = None,
        active_stack_id: Optional[UUID] = None,
        config_name: str = "",
    ) -> Tuple[ProjectResponseModel, StackResponseModel]:
        """Validate the active configuration.

        Call this method to validate the supplied active project and active
        stack values.

        This method is guaranteed to return valid project ID and stack ID
        values. If the supplied project and stack are not set or are not valid
        (e.g. they do not exist or are not accessible), the default project and
        default project stack will be returned in their stead.

        Args:
            active_project_name_or_id: The name or ID of the active project.
            active_stack_id: The ID of the active stack.
            config_name: The name of the configuration to validate (used in the
                displayed logs/messages).

        Returns:
            A tuple containing the active project and active stack.
        """
        active_project: ProjectResponseModel

        # Sanitize the active project: fall back to the default project when
        # the supplied one is unset or can no longer be found.
        if active_project_name_or_id:
            try:
                active_project = self.get_project(active_project_name_or_id)
            except KeyError:
                active_project = self._get_or_create_default_project()

                logger.warning(
                    f"The current {config_name} active project is no longer "
                    f"available. Resetting the active project to "
                    f"'{active_project.name}'."
                )
        else:
            active_project = self._get_or_create_default_project()

            logger.info(
                f"Setting the {config_name} active project "
                f"to '{active_project.name}'."
            )

        active_stack: StackResponseModel

        # Sanitize the active stack
        if active_stack_id:
            # Ensure that the active stack is still valid
            try:
                active_stack = self.get_stack(stack_id=active_stack_id)
            except KeyError:
                logger.warning(
                    "The current %s active stack is no longer available. "
                    "Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(active_project)
            else:
                # The stack exists, but it must also belong to the active
                # project and be accessible to the active user.
                if active_stack.project.id != active_project.id:
                    logger.warning(
                        "The current %s active stack is not part of the active "
                        "project. Resetting the active stack to default.",
                        config_name,
                    )
                    active_stack = self._get_or_create_default_stack(
                        active_project
                    )
                elif not active_stack.is_shared and (
                    not active_stack.user
                    or (active_stack.user.id != self.active_user.id)
                ):
                    logger.warning(
                        "The current %s active stack is not shared and not "
                        "owned by the active user. "
                        "Resetting the active stack to default.",
                        config_name,
                    )
                    active_stack = self._get_or_create_default_stack(
                        active_project
                    )
        else:
            logger.warning(
                "Setting the %s active stack to default.",
                config_name,
            )
            active_stack = self._get_or_create_default_stack(active_project)

        return active_project, active_stack

    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.
        """
        # The deployment type can be overridden through the environment.
        deployment = os.environ.get(
            ENV_ZENML_SERVER_DEPLOYMENT_TYPE, ServerDeploymentType.OTHER
        )
        return ServerModel(
            id=GlobalConfiguration().user_id,
            version=zenml.__version__,
            deployment_type=deployment,
            database_type=ServerDatabaseType.OTHER,
        )

    def is_local_store(self) -> bool:
        """Whether the store is local or connected to a local ZenML server.

        Returns:
            True if the store is local, False otherwise.
        """
        info = self.get_store_info()
        return info.is_local()

    def _get_or_create_default_stack(
        self, project: "ProjectResponseModel"
    ) -> "StackResponseModel":
        """Get the active user's default stack in a project, creating it if missing.

        Args:
            project: The project in which to look up or create the stack.

        Returns:
            The default stack owned by the active user in the given project.
        """
        try:
            return self._get_default_stack(
                project_name_or_id=project.id,
                user_name_or_id=self.active_user.id,
            )
        except KeyError:
            return self._create_default_stack(  # type: ignore[no-any-return]
                project_name_or_id=project.id,
                user_name_or_id=self.active_user.id,
            )

    def _get_or_create_default_project(self) -> "ProjectResponseModel":
        """Get the default project, creating it if it doesn't exist yet.

        Returns:
            The default project.
        """
        try:
            return self._default_project
        except KeyError:
            return self._create_default_project()  # type: ignore[no-any-return]

    # ------
    # Stacks
    # ------

    @track(AnalyticsEvent.REGISTERED_DEFAULT_STACK)
    def _create_default_stack(
        self,
        project_name_or_id: Union[str, UUID],
        user_name_or_id: Union[str, UUID],
    ) -> StackResponseModel:
        """Create the default stack components and stack.

        The default stack contains a local orchestrator and a local artifact
        store.

        Args:
            project_name_or_id: Name or ID of the project to which the stack
                belongs.
            user_name_or_id: The name or ID of the user that owns the stack.

        Returns:
            The model of the created default stack.
        """
        project = self.get_project(project_name_or_id=project_name_or_id)
        user = self.get_user(user_name_or_id=user_name_or_id)

        logger.info(
            f"Creating default stack for user '{user.name}' in project "
            f"{project.name}..."
        )

        # Register one default (local-flavored) component of each type the
        # stack needs: the orchestrator first, then the artifact store.
        components = {}
        for component_type in (
            StackComponentType.ORCHESTRATOR,
            StackComponentType.ARTIFACT_STORE,
        ):
            registered = self.create_stack_component(
                component=ComponentRequestModel(
                    user=user.id,
                    project=project.id,
                    name=DEFAULT_STACK_COMPONENT_NAME,
                    type=component_type,
                    flavor="local",
                    configuration={},
                ),
            )
            components[registered.type] = [registered.id]

        # Register the default stack from the components created above.
        return self.create_stack(
            stack=StackRequestModel(
                name=DEFAULT_STACK_NAME,
                components=components,
                is_shared=False,
                project=project.id,
                user=user.id,
            )
        )

    def _get_default_stack(
        self,
        project_name_or_id: Union[str, UUID],
        user_name_or_id: Union[str, UUID],
    ) -> StackResponseModel:
        """Fetch the default stack for a user in a project.

        Args:
            project_name_or_id: Name or ID of the project.
            user_name_or_id: Name or ID of the user.

        Returns:
            The default stack in the project owned by the supplied user.

        Raises:
            KeyError: if the project or default stack doesn't exist.
        """
        matches = self.list_stacks(
            project_name_or_id=project_name_or_id,
            user_name_or_id=user_name_or_id,
            name=DEFAULT_STACK_NAME,
        )
        if not matches:
            raise KeyError(
                f"No default stack found for user {str(user_name_or_id)} in "
                f"project {str(project_name_or_id)}"
            )
        return matches[0]

    # -----
    # Roles
    # -----
    @property
    def _admin_role(self) -> RoleResponseModel:
        """Look up the built-in admin role.

        Returns:
            The default admin role.
        """
        return self.get_role(DEFAULT_ADMIN_ROLE)

    @track(AnalyticsEvent.CREATED_DEFAULT_ROLES)
    def _create_admin_role(self) -> RoleResponseModel:
        """Creates the admin role.

        Returns:
            The admin role
        """
        logger.info(f"Creating '{DEFAULT_ADMIN_ROLE}' role ...")
        # Admins get full read/write access plus self-management.
        admin_permissions = [
            PermissionType.READ.value,
            PermissionType.WRITE.value,
            PermissionType.ME.value,
        ]
        request = RoleRequestModel(
            name=DEFAULT_ADMIN_ROLE, permissions=admin_permissions
        )
        return self.create_role(request)

    @property
    def _guest_role(self) -> RoleResponseModel:
        """Look up the built-in guest role.

        Returns:
            The guest role.
        """
        return self.get_role(DEFAULT_GUEST_ROLE)

    @track(AnalyticsEvent.CREATED_DEFAULT_ROLES)
    def _create_guest_role(self) -> RoleResponseModel:
        """Creates the guest role.

        Returns:
            The guest role
        """
        logger.info(f"Creating '{DEFAULT_GUEST_ROLE}' role ...")
        # Guests get read access plus self-management only.
        guest_permissions = [
            PermissionType.READ.value,
            PermissionType.ME.value,
        ]
        request = RoleRequestModel(
            name=DEFAULT_GUEST_ROLE, permissions=guest_permissions
        )
        return self.create_role(request)

    # -----
    # Users
    # -----

    @property
    def active_user(self) -> UserResponseModel:
        """The currently active user.

        Returns:
            The active user.
        """
        # Resolve the user model lazily and cache it on first access.
        user = self._active_user
        if user is None:
            user = self.get_user(self.active_user_name)
            self._active_user = user
        return user

    @property
    def users(self) -> List[UserResponseModel]:
        """All users known to the store.

        Returns:
            A list of all existing users.
        """
        return self.list_users()

    @property
    def _default_user_name(self) -> str:
        """Resolve the default user name.

        The value can be overridden through the environment.

        Returns:
            The default user name.
        """
        return os.environ.get(ENV_ZENML_DEFAULT_USER_NAME, DEFAULT_USERNAME)

    @property
    def _default_user(self) -> UserResponseModel:
        """Get the default user.

        Returns:
            The default user.

        Raises:
            KeyError: If the default user doesn't exist.
        """
        user_name = self._default_user_name
        try:
            return self.get_user(user_name)
        except KeyError as e:
            # Chain the lookup failure explicitly so the original cause is
            # preserved in the traceback (PEP 3134).
            raise KeyError(
                f"The default user '{user_name}' is not configured"
            ) from e

    @track(AnalyticsEvent.CREATED_DEFAULT_USER)
    def _create_default_user(self) -> UserResponseModel:
        """Creates a default user with the admin role.

        Returns:
            The default user.
        """
        # Resolve the name through the shared property so it stays consistent
        # with `_default_user` (both honor ENV_ZENML_DEFAULT_USER_NAME).
        user_name = self._default_user_name
        user_password = os.getenv(
            ENV_ZENML_DEFAULT_USER_PASSWORD, DEFAULT_PASSWORD
        )

        logger.info(f"Creating default user '{user_name}' ...")
        new_user = self.create_user(
            UserRequestModel(
                name=user_name,
                active=True,
                password=user_password,
            )
        )
        # Grant the admin role to the freshly created user.
        self.create_role_assignment(
            RoleAssignmentRequestModel(
                role=self._admin_role.id,
                user=new_user.id,
                project=None,
                is_user=True,
            )
        )
        return new_user

    # -----
    # Roles
    # -----

    @property
    def roles(self) -> List[RoleResponseModel]:
        """All roles known to the store.

        Returns:
            A list of all existing roles.
        """
        return self.list_roles()

    @property
    def role_assignments(self) -> List[RoleAssignmentResponseModel]:
        """All role assignments for the active user.

        Returns:
            A list of all role assignments.
        """
        return self.list_role_assignments(user_name_or_id=self.active_user_name)

    # --------
    # Projects
    # --------

    @property
    def _default_project_name(self) -> str:
        """Resolve the default project name.

        The value can be overridden through the environment.

        Returns:
            The default project name.
        """
        return os.environ.get(ENV_ZENML_DEFAULT_PROJECT_NAME, DEFAULT_PROJECT_NAME)

    @property
    def _default_project(self) -> ProjectResponseModel:
        """Get the default project.

        Returns:
            The default project.

        Raises:
            KeyError: if the default project doesn't exist.
        """
        project_name = self._default_project_name
        try:
            return self.get_project(project_name)
        except KeyError as e:
            # Chain the lookup failure explicitly so the original cause is
            # preserved in the traceback (PEP 3134).
            raise KeyError(
                f"The default project '{project_name}' is not configured"
            ) from e

    @track(AnalyticsEvent.CREATED_DEFAULT_PROJECT)
    def _create_default_project(self) -> ProjectResponseModel:
        """Creates a default project.

        Returns:
            The default project.
        """
        name = self._default_project_name
        logger.info(f"Creating default project '{name}' ...")
        request = ProjectRequestModel(name=name)
        return self.create_project(request)

    # ---------
    # Analytics
    # ---------

    def track_event(
        self,
        event: Union[str, AnalyticsEvent],
        metadata: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Track an analytics event.

        Args:
            event: The event to track.
            metadata: Additional metadata to track with the event.
        """
        # Guard clause: analytics can be disabled per store instance.
        if not self.track_analytics:
            return
        # Server information is always tracked, if available.
        track_event(event, metadata)

    class Config:
        """Pydantic configuration class."""

        # Validate attributes when assigning them. We need to set this in order
        # to have a mix of mutable and immutable attributes
        validate_assignment = True
        # Ignore extra attributes from configs of previous ZenML versions
        extra = "ignore"
        # all attributes with leading underscore are private and therefore
        # are mutable and not included in serialization (pydantic v1 setting)
        underscore_attrs_are_private = True
active_user: UserResponseModel property readonly

The active user.

Returns:

Type Description
UserResponseModel

The active user.

role_assignments: List[zenml.models.role_assignment_models.RoleAssignmentResponseModel] property readonly

All role assignments.

Returns:

Type Description
List[zenml.models.role_assignment_models.RoleAssignmentResponseModel]

A list of all role assignments.

roles: List[zenml.models.role_models.RoleResponseModel] property readonly

All existing roles.

Returns:

Type Description
List[zenml.models.role_models.RoleResponseModel]

A list of all existing roles.

type: StoreType property readonly

The type of the store.

Returns:

Type Description
StoreType

The type of the store.

url: str property readonly

The URL of the store.

Returns:

Type Description
str

The URL of the store.

users: List[zenml.models.user_models.UserResponseModel] property readonly

All existing users.

Returns:

Type Description
List[zenml.models.user_models.UserResponseModel]

A list of all existing users.

Config

Pydantic configuration class.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.Config` from the generated API
# docs page; the canonical definition lives inside the class above.
class Config:
    """Pydantic configuration class."""

    # Validate attributes when assigning them. We need to set this in order
    # to have a mix of mutable and immutable attributes
    validate_assignment = True
    # Ignore extra attributes from configs of previous ZenML versions
    extra = "ignore"
    # all attributes with leading underscore are private and therefore
    # are mutable and not included in serialization
    underscore_attrs_are_private = True
__init__(self, skip_default_registrations=False, **kwargs) special

Create and initialize a store.

Parameters:

Name Type Description Default
skip_default_registrations bool

If True, the creation of the default stack and user in the store will be skipped.

False
**kwargs Any

Additional keyword arguments to pass to the Pydantic constructor.

{}

Exceptions:

Type Description
RuntimeError

If the store cannot be initialized.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.__init__` from the generated
# API docs page; the canonical definition lives inside the class above.
def __init__(
    self,
    skip_default_registrations: bool = False,
    **kwargs: Any,
) -> None:
    """Create and initialize a store.

    Args:
        skip_default_registrations: If `True`, the creation of the default
            stack and user in the store will be skipped.
        **kwargs: Additional keyword arguments to pass to the Pydantic
            constructor.

    Raises:
        RuntimeError: If the store cannot be initialized.
    """
    super().__init__(**kwargs)

    try:
        self._initialize()
    except Exception as e:
        raise RuntimeError(
            f"Error initializing {self.type.value} store with URL "
            f"'{self.url}': {str(e)}"
        ) from e

    if not skip_default_registrations:
        logger.debug("Initializing database")
        self._initialize_database()
    else:
        logger.debug("Skipping database initialization")
create_store(config, skip_default_registrations=False, **kwargs) staticmethod

Create and initialize a store from a store configuration.

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to use.

required
skip_default_registrations bool

If True, the creation of the default stack and user in the store will be skipped.

False
**kwargs Any

Additional keyword arguments to pass to the store class

{}

Returns:

Type Description
BaseZenStore

The initialized store.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.create_store` from the
# generated API docs page; the canonical definition lives in the class above.
@staticmethod
def create_store(
    config: StoreConfiguration,
    skip_default_registrations: bool = False,
    **kwargs: Any,
) -> "BaseZenStore":
    """Create and initialize a store from a store configuration.

    Args:
        config: The store configuration to use.
        skip_default_registrations: If `True`, the creation of the default
            stack and user in the store will be skipped.
        **kwargs: Additional keyword arguments to pass to the store class

    Returns:
        The initialized store.
    """
    logger.debug(f"Creating store with config '{config}'...")
    store_class = BaseZenStore.get_store_class(config.type)
    store = store_class(
        config=config,
        skip_default_registrations=skip_default_registrations,
        **kwargs,
    )
    return store
get_default_store_config(path) staticmethod

Get the default store configuration.

The default store is a SQLite store that saves the DB contents on the local filesystem.

Parameters:

Name Type Description Default
path str

The local path where the store DB will be stored.

required

Returns:

Type Description
StoreConfiguration

The default store configuration.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.get_default_store_config` from
# the generated API docs page; the canonical definition lives in the class
# above.
@staticmethod
def get_default_store_config(path: str) -> StoreConfiguration:
    """Get the default store configuration.

    The default store is a SQLite store that saves the DB contents on the
    local filesystem.

    Args:
        path: The local path where the store DB will be stored.

    Returns:
        The default store configuration.
    """
    from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

    config = SqlZenStoreConfiguration(
        type=StoreType.SQL, url=SqlZenStoreConfiguration.get_local_url(path)
    )
    return config
get_store_class(store_type) staticmethod

Returns the class of the given store type.

Parameters:

Name Type Description Default
store_type StoreType

The type of the store to get the class for.

required

Returns:

Type Description
Type[BaseZenStore]

The store class corresponding to the given store type.

Exceptions:

Type Description
TypeError

If the store type is unsupported.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.get_store_class` from the
# generated API docs page; the canonical definition lives in the class above.
@staticmethod
def get_store_class(store_type: StoreType) -> Type["BaseZenStore"]:
    """Returns the class of the given store type.

    Args:
        store_type: The type of the store to get the class for.

    Returns:
        The store class corresponding to the given store type.

    Raises:
        TypeError: If the store type is unsupported.
    """
    if store_type == StoreType.SQL:
        from zenml.zen_stores.sql_zen_store import SqlZenStore

        return SqlZenStore
    elif store_type == StoreType.REST:
        from zenml.zen_stores.rest_zen_store import RestZenStore

        return RestZenStore
    else:
        raise TypeError(
            f"No store implementation found for store type "
            f"`{store_type.value}`."
        )
get_store_config_class(store_type) staticmethod

Returns the store config class of the given store type.

Parameters:

Name Type Description Default
store_type StoreType

The type of the store to get the class for.

required

Returns:

Type Description
Type[StoreConfiguration]

The config class of the given store type.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.get_store_config_class` from
# the generated API docs page; the canonical definition lives in the class
# above.
@staticmethod
def get_store_config_class(
    store_type: StoreType,
) -> Type["StoreConfiguration"]:
    """Returns the store config class of the given store type.

    Args:
        store_type: The type of the store to get the class for.

    Returns:
        The config class of the given store type.
    """
    store_class = BaseZenStore.get_store_class(store_type)
    return store_class.CONFIG_TYPE
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.get_store_info` from the
# generated API docs page; the canonical definition lives in the class above.
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.
    """
    return ServerModel(
        id=GlobalConfiguration().user_id,
        version=zenml.__version__,
        deployment_type=os.environ.get(
            ENV_ZENML_SERVER_DEPLOYMENT_TYPE, ServerDeploymentType.OTHER
        ),
        database_type=ServerDatabaseType.OTHER,
    )
get_store_type(url) staticmethod

Returns the store type associated with a URL schema.

Parameters:

Name Type Description Default
url str

The store URL.

required

Returns:

Type Description
StoreType

The store type associated with the supplied URL schema.

Exceptions:

Type Description
TypeError

If no store type was found to support the supplied URL.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.get_store_type` from the
# generated API docs page; the canonical definition lives in the class above.
@staticmethod
def get_store_type(url: str) -> StoreType:
    """Returns the store type associated with a URL schema.

    Args:
        url: The store URL.

    Returns:
        The store type associated with the supplied URL schema.

    Raises:
        TypeError: If no store type was found to support the supplied URL.
    """
    from zenml.zen_stores.rest_zen_store import RestZenStoreConfiguration
    from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

    if SqlZenStoreConfiguration.supports_url_scheme(url):
        return StoreType.SQL
    elif RestZenStoreConfiguration.supports_url_scheme(url):
        return StoreType.REST
    else:
        raise TypeError(f"No store implementation found for URL: {url}.")
is_local_store(self)

Check if the store is local or connected to a local ZenML server.

Returns:

Type Description
bool

True if the store is local, False otherwise.

Source code in zenml/zen_stores/base_zen_store.py
# NOTE(review): rendered copy of `BaseZenStore.is_local_store` from the
# generated API docs page; the canonical definition lives in the class above.
def is_local_store(self) -> bool:
    """Check if the store is local or connected to a local ZenML server.

    Returns:
        True if the store is local, False otherwise.
    """
    return self.get_store_info().is_local()
track_event(self, event, metadata=None)

Track an analytics event.

Parameters:

Name Type Description Default
event Union[str, zenml.utils.analytics_utils.AnalyticsEvent]

The event to track.

required
metadata Optional[Dict[str, Any]]

Additional metadata to track with the event.

None
Source code in zenml/zen_stores/base_zen_store.py
def track_event(
    self,
    event: Union[str, AnalyticsEvent],
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Track an analytics event.

    Args:
        event: The event to track.
        metadata: Additional metadata to track with the event.
    """
    # Respect the store-level analytics opt-out.
    if not self.track_analytics:
        return
    # Server information is always tracked, if available.
    track_event(event, metadata)
validate_active_config(self, active_project_name_or_id=None, active_stack_id=None, config_name='')

Validate the active configuration.

Call this method to validate the supplied active project and active stack values.

This method is guaranteed to return valid project ID and stack ID values. If the supplied project and stack are not set or are not valid (e.g. they do not exist or are not accessible), the default project and default project stack will be returned in their stead.

Parameters:

Name Type Description Default
active_project_name_or_id Union[str, uuid.UUID]

The name or ID of the active project.

None
active_stack_id Optional[uuid.UUID]

The ID of the active stack.

None
config_name str

The name of the configuration to validate (used in the displayed logs/messages).

''

Returns:

Type Description
Tuple[zenml.models.project_models.ProjectResponseModel, zenml.models.stack_models.StackResponseModel]

A tuple containing the active project and active stack.

Source code in zenml/zen_stores/base_zen_store.py
def validate_active_config(
    self,
    active_project_name_or_id: Optional[Union[str, UUID]] = None,
    active_stack_id: Optional[UUID] = None,
    config_name: str = "",
) -> Tuple[ProjectResponseModel, StackResponseModel]:
    """Validate the active configuration.

    Call this method to validate the supplied active project and active
    stack values.

    This method is guaranteed to return valid project ID and stack ID
    values. If the supplied project and stack are not set or are not valid
    (e.g. they do not exist or are not accessible), the default project and
    default project stack will be returned in their stead.

    Args:
        active_project_name_or_id: The name or ID of the active project.
        active_stack_id: The ID of the active stack.
        config_name: The name of the configuration to validate (used in the
            displayed logs/messages).

    Returns:
        A tuple containing the active project and active stack.
    """
    active_project: ProjectResponseModel

    # Sanitize the active project: fall back to the default project if the
    # supplied one is missing or no longer accessible.
    if active_project_name_or_id:
        try:
            active_project = self.get_project(active_project_name_or_id)
        except KeyError:
            active_project = self._get_or_create_default_project()

            logger.warning(
                f"The current {config_name} active project is no longer "
                f"available. Resetting the active project to "
                f"'{active_project.name}'."
            )
    else:
        active_project = self._get_or_create_default_project()

        logger.info(
            f"Setting the {config_name} active project "
            f"to '{active_project.name}'."
        )

    active_stack: StackResponseModel

    # Sanitize the active stack
    if active_stack_id:
        # Ensure that the active stack is still valid
        try:
            active_stack = self.get_stack(stack_id=active_stack_id)
        except KeyError:
            logger.warning(
                "The current %s active stack is no longer available. "
                "Resetting the active stack to default.",
                config_name,
            )
            active_stack = self._get_or_create_default_stack(active_project)
        else:
            # The stack exists; verify it belongs to the active project...
            if active_stack.project.id != active_project.id:
                logger.warning(
                    "The current %s active stack is not part of the active "
                    "project. Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(
                    active_project
                )
            # ...and that it is either shared or owned by the active user.
            elif not active_stack.is_shared and (
                not active_stack.user
                or (active_stack.user.id != self.active_user.id)
            ):
                logger.warning(
                    "The current %s active stack is not shared and not "
                    "owned by the active user. "
                    "Resetting the active stack to default.",
                    config_name,
                )
                active_stack = self._get_or_create_default_stack(
                    active_project
                )
    else:
        logger.warning(
            "Setting the %s active stack to default.",
            config_name,
        )
        active_stack = self._get_or_create_default_stack(active_project)

    return active_project, active_stack

metadata_store

Base implementation of a metadata store.

MLMDArtifactModel (BaseModel) pydantic-model

Class that models an artifact response from the metadata store.

Source code in zenml/zen_stores/metadata_store.py
class MLMDArtifactModel(BaseModel):
    """Class that models an artifact response from the metadata store."""

    # ID of the artifact in the MLMD database
    mlmd_id: int
    # Type of the artifact (e.g. data, model)
    type: ArtifactType
    # URI where the artifact data is stored
    uri: str
    # Materializer used to write/read the artifact
    materializer: str
    # Data type of the artifact's payload
    data_type: str
    # MLMD ID of the step this artifact is attached to
    mlmd_parent_step_id: int
    # MLMD ID of the step that originally produced the artifact
    mlmd_producer_step_id: int
    # True if the parent step differs from the producer step, i.e. the
    # artifact was reused from the cache rather than recomputed
    is_cached: bool

MLMDPipelineRunModel (BaseModel) pydantic-model

Class that models a pipeline run response from the metadata store.

Source code in zenml/zen_stores/metadata_store.py
class MLMDPipelineRunModel(BaseModel):
    """Class that models a pipeline run response from the metadata store."""

    # ID of the run context in the MLMD database
    mlmd_id: int
    # Name of the pipeline run
    name: str
    # ZenML project ID, if recorded in the run's context properties
    project: Optional[UUID]
    # ZenML user ID, if recorded in the run's context properties
    user: Optional[UUID]
    # ZenML pipeline ID, if recorded in the run's context properties
    pipeline_id: Optional[UUID]
    # ZenML stack ID, if recorded in the run's context properties
    stack_id: Optional[UUID]
    # Pipeline configuration stored with the run (empty dict if absent)
    pipeline_configuration: Dict[str, Any]
    # Number of steps in the run, if recorded
    num_steps: Optional[int]

MLMDStepRunModel (BaseModel) pydantic-model

Class that models a step run response from the metadata store.

Source code in zenml/zen_stores/metadata_store.py
class MLMDStepRunModel(BaseModel):
    """Class that models a step run response from the metadata store."""

    # ID of the step execution in the MLMD database
    mlmd_id: int
    # MLMD IDs of the steps that produced this step's input artifacts
    mlmd_parent_step_ids: List[int]
    # Name of the step implementation (last component of the execution type)
    entrypoint_name: str
    # Name of the step within the pipeline
    name: str
    # Step parameters as JSON-encoded strings
    parameters: Dict[str, str]
    # Full step configuration stored with the execution (empty dict if absent)
    step_configuration: Dict[str, Any]
    # Docstring extracted from the step configuration, if present
    docstring: Optional[str]
    # Number of outputs of the step, if recorded
    num_outputs: Optional[int]

MetadataStore

ZenML MLMD metadata store.

Source code in zenml/zen_stores/metadata_store.py
class MetadataStore:
    """ZenML MLMD metadata store."""

    # NOTE(review): this flag is never read in this class; `__init__`
    # hard-codes `enable_upgrade_migration=True` — confirm whether the flag
    # is consumed elsewhere or is dead.
    upgrade_migration_enabled: bool = True
    # Underlying ML Metadata store client
    store: metadata_store.MetadataStore

    def __init__(self, config: metadata_store_pb2.ConnectionConfig) -> None:
        """Initializes the metadata store.

        Args:
            config: The connection configuration for the metadata store.
        """
        self.store = metadata_store.MetadataStore(
            config, enable_upgrade_migration=True
        )

    @property
    def step_type_mapping(self) -> Dict[int, str]:
        """Maps type_ids to step names.

        Returns:
            Dict[int, str]: a mapping from type_ids to step names.
        """
        return {
            type_.id: type_.name for type_ in self.store.get_execution_types()
        }

    def _check_if_executions_belong_to_pipeline(
        self,
        executions: List[proto.Execution],
        pipeline_id: int,
    ) -> bool:
        """Returns `True` if the executions are associated with the pipeline context.

        Args:
            executions: List of executions.
            pipeline_id: The ID of the pipeline to check.

        Returns:
            `True` if the executions are associated with the pipeline context.
        """
        # Any single execution linked to the pipeline context is enough.
        for execution in executions:
            associated_contexts = self.store.get_contexts_by_execution(
                execution.id
            )
            for context in associated_contexts:
                if context.id == pipeline_id:  # noqa
                    return True
        return False

    def _get_zenml_execution_context_properties(
        self, execution: proto.Execution
    ) -> Any:
        """Get the custom properties of the 'zenml' context of an execution.

        Args:
            execution: The MLMD execution to inspect.

        Returns:
            The custom properties of the execution's 'zenml' context.

        Raises:
            RuntimeError: If the execution has no 'zenml' context.
        """
        associated_contexts = self.store.get_contexts_by_execution(execution.id)
        for context in associated_contexts:
            context_type = self.store.get_context_types_by_id(
                [context.type_id]
            )[0].name
            if context_type == ZENML_CONTEXT_TYPE_NAME:
                return context.custom_properties
        raise RuntimeError(
            f"Could not find 'zenml' context for execution {execution.name}."
        )

    def _get_step_model_from_execution(
        self, execution: proto.Execution
    ) -> MLMDStepRunModel:
        """Get the original step from an execution.

        Args:
            execution: proto.Execution object from mlmd store.

        Returns:
            Model of the original step derived from the proto.Execution.

        Raises:
            KeyError: If the execution is not associated with a step.
        """
        from zenml.steps.utils import (
            INTERNAL_EXECUTION_PARAMETER_PREFIX,
            PARAM_PIPELINE_PARAMETER_NAME,
        )

        # The execution type name ends with the step implementation name.
        impl_name = self.step_type_mapping[execution.type_id].split(".")[-1]

        step_name_property = execution.custom_properties.get(
            INTERNAL_EXECUTION_PARAMETER_PREFIX + PARAM_PIPELINE_PARAMETER_NAME,
            None,
        )
        if step_name_property:
            step_name = json.loads(step_name_property.string_value)
        else:
            raise KeyError(
                f"Step name missing for execution with ID {execution.id}. "
                f"This error probably occurs because you're using ZenML "
                f"version 0.5.4 or newer but your metadata store contains "
                f"data from previous versions."
            )

        # Collect step parameters: only non-internal, JSON-parseable
        # properties qualify.
        step_parameters = {}
        for k, v in execution.custom_properties.items():
            if not k.startswith(INTERNAL_EXECUTION_PARAMETER_PREFIX):
                try:
                    json.loads(v.string_value)
                    step_parameters[k] = v.string_value
                except JSONDecodeError:
                    # this means there is a property in there that is neither
                    # an internal one or one created by zenml. Therefore, we can
                    # ignore it
                    pass

        step_context_properties = self._get_zenml_execution_context_properties(
            execution=execution,
        )
        if MLMD_CONTEXT_STEP_CONFIG_PROPERTY_NAME in step_context_properties:
            step_configuration = json.loads(
                step_context_properties.get(
                    MLMD_CONTEXT_STEP_CONFIG_PROPERTY_NAME
                ).string_value
            )
        else:
            step_configuration = {}

        # Extract docstring.
        docstring = None
        if "config" in step_configuration:
            step_configuration_config = step_configuration["config"]
            if "docstring" in step_configuration_config:
                docstring = step_configuration_config["docstring"]

        # Get number of outputs.
        if MLMD_CONTEXT_NUM_OUTPUTS_PROPERTY_NAME in step_context_properties:
            num_outputs = int(
                step_context_properties.get(
                    MLMD_CONTEXT_NUM_OUTPUTS_PROPERTY_NAME
                ).string_value
            )
        else:
            num_outputs = None

        # TODO [ENG-222]: This is a lot of querying to the metadata store. We
        #  should refactor and make it nicer. Probably it makes more sense
        #  to first get `executions_ids_for_current_run` and then filter on
        #  `event.execution_id in execution_ids_for_current_run`.
        # Core logic here is that we get the event of this particular execution
        # id that gives us the artifacts of this execution. We then go through
        # all `input` artifacts of this execution and get all events related to
        # that artifact. This in turn gives us other events for which this
        # artifact was an `output` artifact. Then we simply need to sort by
        # time to get the most recent execution (i.e. step) that produced that
        # particular artifact.
        events_for_execution = self.store.get_events_by_execution_ids(
            [execution.id]
        )

        parents_step_ids = set()
        for current_event in events_for_execution:
            if current_event.type == current_event.INPUT:
                # this means the artifact is an input artifact
                events_for_input_artifact = [
                    e
                    for e in self.store.get_events_by_artifact_ids(
                        [current_event.artifact_id]
                    )
                    # should be output type and should NOT be the same id as
                    # the execution we are querying, and it should be BEFORE
                    # the time of the current event.
                    if e.type == e.OUTPUT
                    and e.execution_id != current_event.execution_id
                    and e.milliseconds_since_epoch
                    < current_event.milliseconds_since_epoch
                ]

                # sort by time
                events_for_input_artifact.sort(
                    key=lambda x: x.milliseconds_since_epoch  # type: ignore[no-any-return] # noqa
                )
                # take the latest one and add execution to the parents.
                # NOTE(review): assumes every input artifact has at least one
                # earlier OUTPUT event from another execution; an empty list
                # here would raise IndexError — confirm this invariant holds.
                parents_step_ids.add(events_for_input_artifact[-1].execution_id)

        return MLMDStepRunModel(
            mlmd_id=execution.id,
            mlmd_parent_step_ids=list(parents_step_ids),
            entrypoint_name=impl_name,
            name=step_name,
            parameters=step_parameters or {},
            step_configuration=step_configuration or {},
            docstring=docstring,
            num_outputs=num_outputs,
        )

    def _get_pipeline_run_model_from_context(
        self, context: proto.Context
    ) -> MLMDPipelineRunModel:
        """Build a pipeline run model from an MLMD pipeline run context.

        Model IDs, pipeline configuration and step count are read from the
        'zenml' context properties of the run's last execution, when any
        executions exist; otherwise they default to `None`/empty.

        Args:
            context: The MLMD context representing the pipeline run.

        Returns:
            A model of the pipeline run.
        """
        project, user, pipeline_id, stack_id = None, None, None, None
        pipeline_configuration = {}
        num_steps = None

        executions = self.store.get_executions_by_context(context_id=context.id)
        if len(executions) > 0:
            context_properties = self._get_zenml_execution_context_properties(
                executions[-1]
            )

            if MLMD_CONTEXT_MODEL_IDS_PROPERTY_NAME in context_properties:
                model_ids = json.loads(
                    context_properties.get(
                        MLMD_CONTEXT_MODEL_IDS_PROPERTY_NAME
                    ).string_value
                )
                project = model_ids.get("project_id")
                user = model_ids.get("user_id")
                pipeline_id = model_ids.get("pipeline_id")
                stack_id = model_ids.get("stack_id")

            if MLMD_CONTEXT_PIPELINE_CONFIG_PROPERTY_NAME in context_properties:
                pipeline_configuration = json.loads(
                    context_properties.get(
                        MLMD_CONTEXT_PIPELINE_CONFIG_PROPERTY_NAME
                    ).string_value
                )

            if MLMD_CONTEXT_NUM_STEPS_PROPERTY_NAME in context_properties:
                num_steps = int(
                    context_properties.get(
                        MLMD_CONTEXT_NUM_STEPS_PROPERTY_NAME
                    ).string_value
                )

        return MLMDPipelineRunModel(
            mlmd_id=context.id,
            name=context.name,
            project=project,
            user=user,
            pipeline_id=pipeline_id,
            stack_id=stack_id,
            pipeline_configuration=pipeline_configuration or {},
            num_steps=num_steps,
        )

    def get_all_runs(
        self, ignored_ids: Optional[List[int]] = None
    ) -> List[MLMDPipelineRunModel]:
        """Gets models of all pipeline runs registered in MLMD.

        Args:
            ignored_ids: A list of run IDs to ignore.

        Returns:
            A list of models for all runs registered in MLMD.
        """
        from tfx.dsl.compiler.constants import PIPELINE_RUN_CONTEXT_TYPE_NAME

        run_contexts = self.store.get_contexts_by_type(
            PIPELINE_RUN_CONTEXT_TYPE_NAME
        )
        return [
            self._get_pipeline_run_model_from_context(run_context)
            for run_context in run_contexts
            if not ignored_ids or run_context.id not in ignored_ids
        ]

    def get_pipeline_run_steps(
        self, run_id: int
    ) -> Dict[str, MLMDStepRunModel]:
        """Gets all steps for the given pipeline run.

        Args:
            run_id: The ID of the pipeline run to get the steps for.

        Returns:
            A dictionary of step names to step views.
        """
        steps: Dict[str, MLMDStepRunModel] = OrderedDict()
        # reverse the executions as they get returned in reverse chronological
        # order from the metadata store
        executions = self.store.get_executions_by_context(run_id)
        for execution in reversed(executions):  # noqa
            step = self._get_step_model_from_execution(execution)
            steps[step.name] = step
        logger.debug(f"Fetched {len(steps)} steps for pipeline run '{run_id}'.")
        return steps

    def get_step_by_id(self, step_id: int) -> MLMDStepRunModel:
        """Gets a step by its ID.

        Args:
            step_id: The ID of the step to get.

        Returns:
            A model of the step with the given ID.
        """
        execution = self.store.get_executions_by_id([step_id])[0]
        return self._get_step_model_from_execution(execution)

    def get_step_status(self, step_id: int) -> ExecutionStatus:
        """Gets the execution status of a single step.

        Args:
            step_id: The ID of the step to get the status for.

        Returns:
            ExecutionStatus: The status of the step.
        """
        proto = self.store.get_executions_by_id([step_id])[0]  # noqa
        state = proto.last_known_state

        if state == proto.COMPLETE:
            return ExecutionStatus.COMPLETED
        elif state == proto.RUNNING:
            return ExecutionStatus.RUNNING
        elif state == proto.CACHED:
            return ExecutionStatus.CACHED
        else:
            # Any other MLMD state is reported as a failure.
            return ExecutionStatus.FAILED

    def _get_artifact_model_from_proto(
        self, artifact_proto: Artifact, parent_step_id: int
    ) -> MLMDArtifactModel:
        """Gets a model of an artifact from its proto.

        Args:
            artifact_proto: The proto of the artifact to get the model for.
            parent_step_id: The ID of the parent step.

        Returns:
            A model of the artifact.
        """
        from zenml.artifacts.constants import (
            DATATYPE_PROPERTY_KEY,
            MATERIALIZER_PROPERTY_KEY,
        )

        # maps artifact types to their string representation
        artifact_type_mapping = {
            type_.id: type_.name for type_ in self.store.get_artifact_types()
        }
        artifact_type = artifact_type_mapping[artifact_proto.type_id]

        materializer = artifact_proto.properties[
            MATERIALIZER_PROPERTY_KEY
        ].string_value

        data_type = artifact_proto.properties[
            DATATYPE_PROPERTY_KEY
        ].string_value

        artifact_id = artifact_proto.id
        producer_step = self.get_producer_step_from_artifact(artifact_id)
        producer_step_id = producer_step.mlmd_id
        artifact = MLMDArtifactModel(
            mlmd_id=artifact_id,
            type=artifact_type,
            uri=artifact_proto.uri,
            materializer=materializer,
            data_type=data_type,
            mlmd_parent_step_id=parent_step_id,
            mlmd_producer_step_id=producer_step_id,
            # cached artifacts were produced by a different step than the one
            # they are attached to
            is_cached=parent_step_id != producer_step_id,
        )
        return artifact

    def get_step_input_artifacts(
        self,
        step_id: int,
        step_parent_step_ids: List[int],
    ) -> Dict[str, MLMDArtifactModel]:
        """Returns input artifacts for the given step.

        Args:
            step_id: The ID of the step to get the input artifacts for.
            step_parent_step_ids: The IDs of the parent steps of the given step.

        Returns:
            A dict mapping input names to input artifacts.
        """
        events = self.store.get_events_by_execution_ids([step_id])  # noqa
        events = [event for event in events if event.type == event.INPUT]
        input_artifact_ids = [event.artifact_id for event in events]
        artifacts = self.store.get_artifacts_by_id(input_artifact_ids)
        # sort both lists by artifact ID so they can be zipped pairwise
        events.sort(key=lambda x: x.artifact_id)
        artifacts.sort(key=lambda x: x.id)

        inputs: Dict[str, MLMDArtifactModel] = {}
        for event_proto, artifact_proto in zip(events, artifacts):
            assert event_proto.artifact_id == artifact_proto.id
            artifact_name = event_proto.path.steps[0].key

            # In the case that this is an input event, we actually need
            # to resolve the parent step ID via its parents outputs.
            parent_step_id = None
            for parent_id in step_parent_step_ids:
                self.get_step_by_id(parent_id)
                parent_outputs = self.get_step_output_artifacts(
                    step_id=parent_id,
                )
                for parent_output in parent_outputs.values():
                    if artifact_proto.id == parent_output.mlmd_id:
                        parent_step_id = parent_id
            assert parent_step_id is not None

            artifact = self._get_artifact_model_from_proto(
                artifact_proto, parent_step_id=parent_step_id
            )
            inputs[artifact_name] = artifact

        logger.debug("Fetched %d inputs for step '%d'.", len(inputs), step_id)
        return inputs

    def get_step_output_artifacts(
        self, step_id: int
    ) -> Dict[str, MLMDArtifactModel]:
        """Returns the output artifacts for the given step.

        Args:
            step_id: The ID of the step to get the output artifacts for.

        Returns:
            A dict mapping output names to output artifacts.
        """
        events = self.store.get_events_by_execution_ids([step_id])  # noqa
        events = [event for event in events if event.type == event.OUTPUT]
        output_artifact_ids = [event.artifact_id for event in events]
        artifacts = self.store.get_artifacts_by_id(output_artifact_ids)
        # sort both lists by artifact ID so they can be zipped pairwise
        events.sort(key=lambda x: x.artifact_id)
        artifacts.sort(key=lambda x: x.id)

        outputs: Dict[str, MLMDArtifactModel] = {}
        for event_proto, artifact_proto in zip(events, artifacts):
            assert event_proto.artifact_id == artifact_proto.id
            artifact_name = event_proto.path.steps[0].key
            artifact = self._get_artifact_model_from_proto(
                artifact_proto, parent_step_id=step_id
            )
            outputs[artifact_name] = artifact

        logger.debug("Fetched %d outputs for step '%d'.", len(outputs), step_id)
        return outputs

    def get_producer_step_from_artifact(
        self, artifact_id: int
    ) -> MLMDStepRunModel:
        """Find the original step that created an artifact.

        Args:
            artifact_id: ID of the artifact for which to get the producer step.

        Returns:
            Original step that produced the artifact.
        """
        executions_ids = set(
            event.execution_id
            for event in self.store.get_events_by_artifact_ids([artifact_id])
            if event.type == event.OUTPUT
        )
        execution = self.store.get_executions_by_id(executions_ids)[0]
        return self._get_step_model_from_execution(execution)

    # NOTE(review): `MetadataStore` is a plain class, not a pydantic
    # `BaseModel`, so this inner `Config` appears inert here — confirm
    # whether it is intentional or a leftover.
    class Config:
        """Pydantic configuration class."""

        # public attributes are immutable
        allow_mutation = False
        # all attributes with leading underscore are private and therefore
        # are mutable and not included in serialization
        underscore_attrs_are_private = True
        # prevent extra attributes during model initialization
        extra = Extra.forbid
step_type_mapping: Dict[int, str] property readonly

Maps type_ids to step names.

Returns:

Type Description
Dict[int, str]

a mapping from type_ids to step names.

Config

Pydantic configuration class.

Source code in zenml/zen_stores/metadata_store.py
class Config:
    """Pydantic configuration class."""

    # public attributes are immutable once the model is constructed
    allow_mutation = False
    # all attributes with leading underscore are private and therefore
    # are mutable and not included in serialization
    underscore_attrs_are_private = True
    # prevent extra attributes during model initialization
    extra = Extra.forbid
__init__(self, config) special

Initializes the metadata store.

Parameters:

Name Type Description Default
config ConnectionConfig

The connection configuration for the metadata store.

required
Source code in zenml/zen_stores/metadata_store.py
def __init__(self, config: metadata_store_pb2.ConnectionConfig) -> None:
    """Initializes the metadata store.

    Args:
        config: The connection configuration for the metadata store.
    """
    # Schema upgrade migrations are always enabled when connecting.
    mlmd_store = metadata_store.MetadataStore(
        config, enable_upgrade_migration=True
    )
    self.store = mlmd_store
get_all_runs(self, ignored_ids=None)

Gets models of all pipeline runs registered in MLMD.

Parameters:

Name Type Description Default
ignored_ids Optional[List[int]]

A list of run IDs to ignore.

None

Returns:

Type Description
List[zenml.zen_stores.metadata_store.MLMDPipelineRunModel]

A list of models for all runs registered in MLMD.

Source code in zenml/zen_stores/metadata_store.py
def get_all_runs(
    self, ignored_ids: Optional[List[int]] = None
) -> List[MLMDPipelineRunModel]:
    """Gets models of all pipeline runs registered in MLMD.

    Args:
        ignored_ids: A list of run IDs to ignore.

    Returns:
        A list of models for all runs registered in MLMD.
    """
    from tfx.dsl.compiler.constants import PIPELINE_RUN_CONTEXT_TYPE_NAME

    run_contexts = self.store.get_contexts_by_type(
        PIPELINE_RUN_CONTEXT_TYPE_NAME
    )
    # Build the skip set once; an empty/None `ignored_ids` skips nothing.
    skipped = set(ignored_ids) if ignored_ids else set()
    return [
        self._get_pipeline_run_model_from_context(run_context)
        for run_context in run_contexts
        if run_context.id not in skipped
    ]
get_pipeline_run_steps(self, run_id)

Gets all steps for the given pipeline run.

Parameters:

Name Type Description Default
run_id int

The ID of the pipeline run to get the steps for.

required

Returns:

Type Description
Dict[str, zenml.zen_stores.metadata_store.MLMDStepRunModel]

A dictionary of step names to step views.

Source code in zenml/zen_stores/metadata_store.py
def get_pipeline_run_steps(
    self, run_id: int
) -> Dict[str, MLMDStepRunModel]:
    """Gets all steps for the given pipeline run.

    Args:
        run_id: The ID of the pipeline run to get the steps for.

    Returns:
        A dictionary of step names to step views.
    """
    executions = self.store.get_executions_by_context(run_id)
    # Executions come back in reverse chronological order, so iterate
    # back-to-front to restore chronological ordering.
    steps: Dict[str, MLMDStepRunModel] = OrderedDict()
    for execution in reversed(executions):  # noqa
        step_model = self._get_step_model_from_execution(execution)
        steps[step_model.name] = step_model
    logger.debug(f"Fetched {len(steps)} steps for pipeline run '{run_id}'.")
    return steps
get_producer_step_from_artifact(self, artifact_id)

Find the original step that created an artifact.

Parameters:

Name Type Description Default
artifact_id int

ID of the artifact for which to get the producer step.

required

Returns:

Type Description
MLMDStepRunModel

Original step that produced the artifact.

Source code in zenml/zen_stores/metadata_store.py
def get_producer_step_from_artifact(
    self, artifact_id: int
) -> MLMDStepRunModel:
    """Find the original step that created an artifact.

    Args:
        artifact_id: ID of the artifact for which to get the producer step.

    Returns:
        Original step that produced the artifact.
    """
    # Only OUTPUT events identify producers of the artifact.
    producer_events = [
        event
        for event in self.store.get_events_by_artifact_ids([artifact_id])
        if event.type == event.OUTPUT
    ]
    executions_ids = {event.execution_id for event in producer_events}
    execution = self.store.get_executions_by_id(executions_ids)[0]
    return self._get_step_model_from_execution(execution)
get_step_by_id(self, step_id)

Gets a step by its ID.

Parameters:

Name Type Description Default
step_id int

The ID of the step to get.

required

Returns:

Type Description
MLMDStepRunModel

A model of the step with the given ID.

Source code in zenml/zen_stores/metadata_store.py
def get_step_by_id(self, step_id: int) -> MLMDStepRunModel:
    """Gets a step by its ID.

    Args:
        step_id: The ID of the step to get.

    Returns:
        A model of the step with the given ID.
    """
    matching_executions = self.store.get_executions_by_id([step_id])
    return self._get_step_model_from_execution(matching_executions[0])
get_step_input_artifacts(self, step_id, step_parent_step_ids)

Returns input artifacts for the given step.

Parameters:

Name Type Description Default
step_id int

The ID of the step to get the input artifacts for.

required
step_parent_step_ids List[int]

The IDs of the parent steps of the given step.

required

Returns:

Type Description
Dict[str, zenml.zen_stores.metadata_store.MLMDArtifactModel]

A dict mapping input names to input artifacts.

Source code in zenml/zen_stores/metadata_store.py
def get_step_input_artifacts(
    self,
    step_id: int,
    step_parent_step_ids: List[int],
) -> Dict[str, MLMDArtifactModel]:
    """Returns input artifacts for the given step.

    Args:
        step_id: The ID of the step to get the input artifacts for.
        step_parent_step_ids: The IDs of the parent steps of the given step.

    Returns:
        A dict mapping input names to input artifacts.
    """
    input_events = [
        event
        for event in self.store.get_events_by_execution_ids([step_id])  # noqa
        if event.type == event.INPUT
    ]
    artifacts = self.store.get_artifacts_by_id(
        [event.artifact_id for event in input_events]
    )
    # Align both lists pairwise by sorting on the artifact ID.
    input_events.sort(key=lambda e: e.artifact_id)
    artifacts.sort(key=lambda a: a.id)

    inputs: Dict[str, MLMDArtifactModel] = {}
    for event_proto, artifact_proto in zip(input_events, artifacts):
        assert event_proto.artifact_id == artifact_proto.id
        artifact_name = event_proto.path.steps[0].key

        # Input events don't carry their producing step directly, so find
        # the parent step whose outputs include this artifact.
        parent_step_id = None
        for candidate_id in step_parent_step_ids:
            self.get_step_by_id(candidate_id)
            candidate_outputs = self.get_step_output_artifacts(
                step_id=candidate_id,
            )
            for candidate_output in candidate_outputs.values():
                if artifact_proto.id == candidate_output.mlmd_id:
                    parent_step_id = candidate_id
        assert parent_step_id is not None

        inputs[artifact_name] = self._get_artifact_model_from_proto(
            artifact_proto, parent_step_id=parent_step_id
        )

    logger.debug("Fetched %d inputs for step '%d'.", len(inputs), step_id)
    return inputs
get_step_output_artifacts(self, step_id)

Returns the output artifacts for the given step.

Parameters:

Name Type Description Default
step_id int

The ID of the step to get the output artifacts for.

required

Returns:

Type Description
Dict[str, zenml.zen_stores.metadata_store.MLMDArtifactModel]

A dict mapping output names to output artifacts.

Source code in zenml/zen_stores/metadata_store.py
def get_step_output_artifacts(
    self, step_id: int
) -> Dict[str, MLMDArtifactModel]:
    """Returns the output artifacts for the given step.

    Args:
        step_id: The ID of the step to get the output artifacts for.

    Returns:
        A dict mapping output names to output artifacts.
    """
    all_events = self.store.get_events_by_execution_ids([step_id])  # noqa
    output_events = [e for e in all_events if e.type == e.OUTPUT]
    artifact_protos = self.store.get_artifacts_by_id(
        [e.artifact_id for e in output_events]
    )
    # Align the two lists pairwise by sorting both on the artifact ID.
    output_events.sort(key=lambda e: e.artifact_id)
    artifact_protos.sort(key=lambda a: a.id)

    outputs: Dict[str, MLMDArtifactModel] = {}
    for event, artifact_proto in zip(output_events, artifact_protos):
        assert event.artifact_id == artifact_proto.id
        # The event path carries the output name under which the artifact
        # was registered by the step.
        output_name = event.path.steps[0].key
        outputs[output_name] = self._get_artifact_model_from_proto(
            artifact_proto, parent_step_id=step_id
        )

    logger.debug("Fetched %d outputs for step '%d'.", len(outputs), step_id)
    return outputs
get_step_status(self, step_id)

Gets the execution status of a single step.

Parameters:

Name Type Description Default
step_id int

The ID of the step to get the status for.

required

Returns:

Type Description
ExecutionStatus

The status of the step.

Source code in zenml/zen_stores/metadata_store.py
def get_step_status(self, step_id: int) -> ExecutionStatus:
    """Gets the execution status of a single step.

    Args:
        step_id: The ID of the step to get the status for.

    Returns:
        ExecutionStatus: The status of the step.
    """
    execution = self.store.get_executions_by_id([step_id])[0]  # noqa
    last_state = execution.last_known_state

    # Map the MLMD execution states onto ZenML statuses; anything that is
    # not complete, running or cached is treated as failed.
    state_to_status = {
        execution.COMPLETE: ExecutionStatus.COMPLETED,
        execution.RUNNING: ExecutionStatus.RUNNING,
        execution.CACHED: ExecutionStatus.CACHED,
    }
    return state_to_status.get(last_state, ExecutionStatus.FAILED)

migrations special

Alembic database migration utilities.

alembic

Alembic utilities wrapper.

The Alembic class defined here acts as a wrapper around the Alembic library that automatically configures Alembic to use the ZenML SQL store database connection.

Alembic

Alembic environment and migration API.

This class provides a wrapper around the Alembic library that automatically configures Alembic to use the ZenML SQL store database connection.

Source code in zenml/zen_stores/migrations/alembic.py
class Alembic:
    """Alembic environment and migration API.

    This class provides a wrapper around the Alembic library that automatically
    configures Alembic to use the ZenML SQL store database connection.
    """

    def __init__(
        self,
        engine: Engine,
        metadata: MetaData = SQLModel.metadata,
        context: Optional[EnvironmentContext] = None,
        **kwargs: Any,
    ) -> None:
        """Initialize the Alembic wrapper.

        Args:
            engine: The SQLAlchemy engine to use.
            metadata: The SQLAlchemy metadata to use.
            context: The Alembic environment context to use. If not set, a new
                context is created pointing to the ZenML migrations directory.
            **kwargs: Additional keyword arguments to pass to the Alembic
                environment context.
        """
        self.engine = engine
        self.metadata = metadata
        self.context_kwargs = kwargs

        # Configure Alembic programmatically to use the migrations package
        # this file lives in, instead of an external alembic.ini file.
        self.config = Config()
        self.config.set_main_option(
            "script_location", str(Path(__file__).parent)
        )
        self.config.set_main_option(
            "version_locations", str(Path(__file__).parent / "versions")
        )

        self.script_directory = ScriptDirectory.from_config(self.config)
        if context is None:
            self.environment_context = EnvironmentContext(
                self.config, self.script_directory
            )
        else:
            self.environment_context = context

    def db_is_empty(self) -> bool:
        """Check if the database is empty.

        Returns:
            True if the database is empty, False otherwise.
        """
        # Check the existence of any of the SQLModel tables. Use a context
        # manager so the connection is closed/returned to the pool; the
        # previous implementation opened a connection inline and leaked it.
        with self.engine.connect() as connection:
            return not self.engine.dialect.has_table(
                connection, schemas.StackSchema.__tablename__
            )

    def run_migrations(
        self,
        fn: Optional[Callable[[_RevIdType, MigrationContext], List[Any]]],
    ) -> None:
        """Run an online migration function in the current migration context.

        Args:
            fn: Migration function to run. If not set, the function configured
                externally by the Alembic CLI command is used.
        """
        fn_context_args: Dict[Any, Any] = {}
        if fn is not None:
            fn_context_args["fn"] = fn

        with self.engine.connect() as connection:
            self.environment_context.configure(
                connection=connection,
                target_metadata=self.metadata,
                include_object=include_object,
                compare_type=True,
                render_as_batch=True,
                **fn_context_args,
                **self.context_kwargs,
            )

            with self.environment_context.begin_transaction():
                self.environment_context.run_migrations()

    def current_revisions(self) -> List[str]:
        """Get the current database revisions.

        Returns:
            List of head revisions.
        """
        current_revisions: List[str] = []

        def do_get_current_rev(rev: _RevIdType, context: Any) -> List[Any]:
            nonlocal current_revisions

            for r in self.script_directory.get_all_current(
                rev  # type:ignore [arg-type]
            ):
                if r is None:
                    continue
                current_revisions.append(r.revision)
            return []

        self.run_migrations(do_get_current_rev)

        return current_revisions

    def stamp(self, revision: str) -> None:
        """Stamp the revision table with the given revision without running any migrations.

        Args:
            revision: String revision target.
        """

        def do_stamp(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._stamp_revs(revision, rev)

        self.run_migrations(do_stamp)

    def upgrade(self, revision: str = "heads") -> None:
        """Upgrade the database to a later version.

        Args:
            revision: String revision target.
        """

        def do_upgrade(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._upgrade_revs(
                revision, rev  # type:ignore [arg-type]
            )

        self.run_migrations(do_upgrade)

    def downgrade(self, revision: str) -> None:
        """Revert the database to a previous version.

        Args:
            revision: String revision target.
        """

        def do_downgrade(rev: _RevIdType, context: Any) -> List[Any]:
            return self.script_directory._downgrade_revs(
                revision, rev  # type:ignore [arg-type]
            )

        self.run_migrations(do_downgrade)
__init__(self, engine, metadata=MetaData(), context=None, **kwargs) special

Initialize the Alembic wrapper.

Parameters:

Name Type Description Default
engine Engine

The SQLAlchemy engine to use.

required
metadata MetaData

The SQLAlchemy metadata to use.

MetaData()
context Optional[alembic.runtime.environment.EnvironmentContext]

The Alembic environment context to use. If not set, a new context is created pointing to the ZenML migrations directory.

None
**kwargs Any

Additional keyword arguments to pass to the Alembic environment context.

{}
Source code in zenml/zen_stores/migrations/alembic.py
def __init__(
    self,
    engine: Engine,
    metadata: MetaData = SQLModel.metadata,
    context: Optional[EnvironmentContext] = None,
    **kwargs: Any,
) -> None:
    """Initialize the Alembic wrapper.

    Args:
        engine: The SQLAlchemy engine to use.
        metadata: The SQLAlchemy metadata to use.
        context: The Alembic environment context to use. If not set, a new
            context is created pointing to the ZenML migrations directory.
        **kwargs: Additional keyword arguments to pass to the Alembic
            environment context.
    """
    self.engine = engine
    self.metadata = metadata
    self.context_kwargs = kwargs

    # Point Alembic at this migrations package instead of relying on an
    # external alembic.ini configuration file.
    migrations_dir = Path(__file__).parent
    self.config = Config()
    self.config.set_main_option("script_location", str(migrations_dir))
    self.config.set_main_option(
        "version_locations", str(migrations_dir / "versions")
    )

    self.script_directory = ScriptDirectory.from_config(self.config)
    self.environment_context = (
        context
        if context is not None
        else EnvironmentContext(self.config, self.script_directory)
    )
current_revisions(self)

Get the current database revisions.

Returns:

Type Description
List[str]

List of head revisions.

Source code in zenml/zen_stores/migrations/alembic.py
def current_revisions(self) -> List[str]:
    """Get the current database revisions.

    Returns:
        List of head revisions.
    """
    revisions: List[str] = []

    # Collector callback executed by Alembic inside the migration context;
    # it only records revisions and performs no migration steps.
    def _collect_revisions(rev: _RevIdType, context: Any) -> List[Any]:
        revisions.extend(
            script.revision
            for script in self.script_directory.get_all_current(
                rev  # type:ignore [arg-type]
            )
            if script is not None
        )
        return []

    self.run_migrations(_collect_revisions)

    return revisions
db_is_empty(self)

Check if the database is empty.

Returns:

Type Description
bool

True if the database is empty, False otherwise.

Source code in zenml/zen_stores/migrations/alembic.py
def db_is_empty(self) -> bool:
    """Check if the database is empty.

    Returns:
        True if the database is empty, False otherwise.
    """
    # Check the existence of any of the SQLModel tables. Use a context
    # manager so the connection is closed/returned to the pool; the previous
    # implementation opened a connection inline and never closed it.
    with self.engine.connect() as connection:
        return not self.engine.dialect.has_table(
            connection, schemas.StackSchema.__tablename__
        )
downgrade(self, revision)

Revert the database to a previous version.

Parameters:

Name Type Description Default
revision str

String revision target.

required
Source code in zenml/zen_stores/migrations/alembic.py
def downgrade(self, revision: str) -> None:
    """Revert the database to a previous version.

    Args:
        revision: String revision target.
    """

    # Callback executed by Alembic inside the migration context.
    def _downgrade_fn(rev: _RevIdType, context: Any) -> List[Any]:
        return self.script_directory._downgrade_revs(
            revision, rev  # type:ignore [arg-type]
        )

    self.run_migrations(_downgrade_fn)
run_migrations(self, fn)

Run an online migration function in the current migration context.

Parameters:

Name Type Description Default
fn Optional[Callable[[Union[str, Sequence[str]], alembic.runtime.migration.MigrationContext], List[Any]]]

Migration function to run. If not set, the function configured externally by the Alembic CLI command is used.

required
Source code in zenml/zen_stores/migrations/alembic.py
def run_migrations(
    self,
    fn: Optional[Callable[[_RevIdType, MigrationContext], List[Any]]],
) -> None:
    """Run an online migration function in the current migration context.

    Args:
        fn: Migration function to run. If not set, the function configured
            externally by the Alembic CLI command is used.
    """
    # Only override Alembic's configured function when one was supplied.
    extra_args: Dict[Any, Any] = {"fn": fn} if fn is not None else {}

    with self.engine.connect() as connection:
        self.environment_context.configure(
            connection=connection,
            target_metadata=self.metadata,
            include_object=include_object,
            compare_type=True,
            render_as_batch=True,
            **extra_args,
            **self.context_kwargs,
        )
        with self.environment_context.begin_transaction():
            self.environment_context.run_migrations()
stamp(self, revision)

Stamp the revision table with the given revision without running any migrations.

Parameters:

Name Type Description Default
revision str

String revision target.

required
Source code in zenml/zen_stores/migrations/alembic.py
def stamp(self, revision: str) -> None:
    """Stamp the revision table with the given revision without running any migrations.

    Args:
        revision: String revision target.
    """
    # The callback only writes the revision marker; no schema changes run.
    self.run_migrations(
        lambda rev, context: self.script_directory._stamp_revs(revision, rev)
    )
upgrade(self, revision='heads')

Upgrade the database to a later version.

Parameters:

Name Type Description Default
revision str

String revision target.

'heads'
Source code in zenml/zen_stores/migrations/alembic.py
def upgrade(self, revision: str = "heads") -> None:
    """Upgrade the database to a later version.

    Args:
        revision: String revision target.
    """

    # Callback executed by Alembic inside the migration context.
    def _upgrade_fn(rev: _RevIdType, context: Any) -> List[Any]:
        return self.script_directory._upgrade_revs(
            revision, rev  # type:ignore [arg-type]
        )

    self.run_migrations(_upgrade_fn)
AlembicVersion (Base)

Alembic version table.

Source code in zenml/zen_stores/migrations/alembic.py
class AlembicVersion(Base):  # type: ignore[valid-type,misc]
    """Alembic version table."""

    __tablename__ = "alembic_version"
    # Single-column primary key holding the current migration revision ID.
    version_num = Column(String, nullable=False, primary_key=True)
include_object(object, name, type_, *args, **kwargs)

Function used to exclude tables from the migration scripts.

Parameters:

Name Type Description Default
object Any

The schema item object to check.

required
name str

The name of the object to check.

required
type_ str

The type of the object to check.

required
*args Any

Additional arguments.

()
**kwargs Any

Additional keyword arguments.

{}

Returns:

Type Description
bool

True if the object should be included, False otherwise.

Source code in zenml/zen_stores/migrations/alembic.py
def include_object(
    object: Any, name: str, type_: str, *args: Any, **kwargs: Any
) -> bool:
    """Function used to exclude tables from the migration scripts.

    Args:
        object: The schema item object to check.
        name: The name of the object to check.
        type_: The type of the object to check.
        *args: Additional arguments.
        **kwargs: Additional keyword arguments.

    Returns:
        True if the object should be included, False otherwise.
    """
    # Only tables can be excluded; every other schema item is included.
    if type_ == "table" and name in exclude_tables:
        return False
    return True

versions special

Alembic database migration scripts.

5330ba58bf20_rename_tables_and_foreign_keys

Rename tables and foreign keys [5330ba58bf20].

Revision ID: 5330ba58bf20 Revises: 7280c14811d6 Create Date: 2022-11-03 16:33:15.220179

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    (
        old_table_names,
        new_table_names,
        project_not_null_tables,
        old_fk_constraints,
        new_fk_constraints,
    ) = _get_changes()

    # Drop the foreign key constraints introduced by the upgrade.
    for source, target, source_column, _, _ in new_fk_constraints:
        _drop_fk_constraint(
            source, _fk_constraint_name(source, target, source_column)
        )

    # Make `project_id` nullable again where the upgrade made it NOT NULL.
    for table in project_not_null_tables:
        with op.batch_alter_table(table, schema=None) as batch_op:
            batch_op.alter_column(
                "project_id", existing_type=sa.CHAR(length=32), nullable=True
            )

    # Restore the original table names.
    for old_name, new_name in zip(old_table_names, new_table_names):
        op.rename_table(new_name, old_name)

    # Re-create the foreign key constraints as they existed before the
    # upgrade.
    for (
        source,
        target,
        source_column,
        target_column,
        ondelete,
    ) in old_fk_constraints:
        _create_fk_constraint(
            source, target, source_column, target_column, ondelete
        )
upgrade()

Upgrade database schema and/or data, creating a new revision.

Exceptions:

Type Description
NotImplementedError

If the database engine is not SQLite or MySQL.

Source code in zenml/zen_stores/migrations/versions/5330ba58bf20_rename_tables_and_foreign_keys.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision.

    Raises:
        NotImplementedError: If the database engine is not SQLite or MySQL.
    """
    (
        old_table_names,
        new_table_names,
        project_not_null_tables,
        old_fk_constraints,
        new_fk_constraints,
    ) = _get_changes()

    engine_name = op.get_bind().engine.name

    # Under MySQL, we need to sort the old foreign keys by source table and
    # source column first since the default foreign key names contain the
    # foreign key number.
    if engine_name == "mysql":
        old_fk_constraints.sort(key=lambda x: (x[0], x[2]))
    # Per-source-table counter used to reconstruct MySQL's auto-generated
    # `<table>_ibfk_<n>` constraint names below.
    source_table_fk_constraint_counts: Dict[str, int] = defaultdict(int)

    # Drop old foreign key constraints.
    for source, target, source_column, _, _ in old_fk_constraints:
        if engine_name == "sqlite":
            constraint_name = _fk_constraint_name(source, target, source_column)
        elif engine_name == "mysql":
            # NOTE(review): assumes MySQL numbered the FKs in the same
            # (source, source_column) order as the sorted list above.
            source_table_fk_constraint_counts[source] += 1
            fk_num = source_table_fk_constraint_counts[source]
            constraint_name = f"{source}_ibfk_{fk_num}"
        else:
            raise NotImplementedError(f"Unsupported engine: {engine_name}")
        _drop_fk_constraint(source, constraint_name)

    # Rename tables
    for old_table_name, new_table_name in zip(old_table_names, new_table_names):
        op.rename_table(old_table_name, new_table_name)

    # Set `project_id` to `NOT NULL` where appropriate.
    for table_name in project_not_null_tables:
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            batch_op.alter_column(
                "project_id", existing_type=sa.CHAR(length=32), nullable=False
            )

    # Create new foreign key constraints
    for (
        source,
        target,
        source_column,
        target_column,
        ondelete,
    ) in new_fk_constraints:
        _create_fk_constraint(
            source, target, source_column, target_column, ondelete
        )
5994f9ad0489_introduce_role_permissions

Introduce role permissions [5994f9ad0489].

Revision ID: 5994f9ad0489 Revises: 0.21.1 Create Date: 2022-10-25 23:52:25.935344

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/5994f9ad0489_introduce_role_permissions.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Dropping the table also discards all stored role -> permission rows.
    op.drop_table("rolepermissionschema")
    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/5994f9ad0489_introduce_role_permissions.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # Create the rolepermissionschema table to track which permissions a given
    #  role grants
    op.create_table(
        "rolepermissionschema",
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("role_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["roleschema.id"],
        ),
        sa.PrimaryKeyConstraint("name", "role_id"),
    )
    # get metadata from current connection
    meta = sa.MetaData(bind=op.get_bind())

    # pass in tuple with tables we want to reflect, otherwise whole database
    #  will get reflected
    meta.reflect(
        only=(
            "rolepermissionschema",
            "roleschema",
            "userroleassignmentschema",
            "teamroleassignmentschema",
            "userschema",
        )
    )

    # In order to ensure unique names on roles delete potential admin/guest role
    #  that might have been created previous to this alembic version
    userroleassignmentschema = sa.Table(
        "userroleassignmentschema",
        meta,
    )
    teamroleassignmentschema = sa.Table(
        "teamroleassignmentschema",
        meta,
    )
    roleschema = sa.Table(
        "roleschema",
        meta,
    )
    conn = op.get_bind()
    # Find the IDs of any pre-existing admin/guest roles so their
    # assignments can be removed before the roles themselves are deleted.
    res = conn.execute(
        select(roleschema.c.id).where(roleschema.c.name.in_(["admin", "guest"]))
    ).fetchall()

    role_ids = [i[0] for i in res]

    conn.execute(
        userroleassignmentschema.delete().where(
            userroleassignmentschema.c.role_id.in_(role_ids)
        )
    )
    conn.execute(
        teamroleassignmentschema.delete().where(
            teamroleassignmentschema.c.role_id.in_(role_ids)
        )
    )
    conn.execute(
        roleschema.delete().where(
            or_(roleschema.c.name == "admin", roleschema.c.name == "guest")
        )
    )

    # Create the three standard permissions also defined in
    #  zenml.enums.PermissionType
    read = "read"
    write = "write"
    me = "me"

    # GUIDs are stored as 32-char hex strings without dashes.
    admin_id = str(uuid.uuid4()).replace("-", "")
    guest_id = str(uuid.uuid4()).replace("-", "")

    # Prefill the roles table with the admin and guest role
    op.bulk_insert(
        sa.Table(
            "roleschema",
            meta,
        ),
        [
            {
                "id": admin_id,
                "name": "admin",
                "created": datetime.datetime.now(),
                "updated": datetime.datetime.now(),
            },
            {
                "id": guest_id,
                "name": "guest",
                "created": datetime.datetime.now(),
                "updated": datetime.datetime.now(),
            },
        ],
    )

    # Give the admin read, write and me permissions,
    # give the guest read and me permissions
    op.bulk_insert(
        sa.Table(
            "rolepermissionschema",
            meta,
        ),
        [
            {"role_id": admin_id, "name": read},
            {"role_id": admin_id, "name": write},
            {"role_id": admin_id, "name": me},
            {"role_id": guest_id, "name": read},
            {"role_id": guest_id, "name": me},
        ],
    )

    # In order to not break permissions for existing users, all existing users
    #  will be assigned the admin role
    userschema = sa.Table(
        "userschema",
        meta,
    )

    conn = op.get_bind()
    res = conn.execute(select(userschema.c.id)).fetchall()
    user_ids = [i[0] for i in res]

    # One admin role assignment row per existing user.
    for user_id in user_ids:
        op.bulk_insert(
            sa.Table(
                "userroleassignmentschema",
                meta,
            ),
            [
                {
                    "id": str(uuid.uuid4()).replace("-", ""),
                    "role_id": admin_id,
                    "user_id": user_id,
                    "created": datetime.datetime.now(),
                    "updated": datetime.datetime.now(),
                }
            ],
        )
7280c14811d6_use_text_type

Use Text type [7280c14811d6].

Revision ID: 7280c14811d6 Revises: 5994f9ad0489 Create Date: 2022-11-09 16:29:37.025589

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/7280c14811d6_use_text_type.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Shrink the TEXT columns back to VARCHAR(4096), one batch operation per
    # table, preserving each column's original nullability.
    columns_per_table = {
        "steprunschema": [
            ("docstring", True),
            ("step_configuration", False),
            ("parameters", False),
        ],
        "pipelineschema": [
            ("spec", False),
            ("docstring", True),
        ],
        "pipelinerunschema": [
            ("pipeline_configuration", False),
        ],
    }
    for table_name, columns in columns_per_table.items():
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            for column_name, nullable in columns:
                batch_op.alter_column(
                    column_name,
                    existing_type=sa.TEXT(),
                    type_=sa.VARCHAR(length=4096),
                    existing_nullable=nullable,
                )

    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/7280c14811d6_use_text_type.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Widen the VARCHAR(4096) columns to TEXT, one batch operation per
    # table, preserving each column's original nullability.
    columns_per_table = {
        "pipelinerunschema": [
            ("pipeline_configuration", False),
        ],
        "pipelineschema": [
            ("docstring", True),
            ("spec", False),
        ],
        "steprunschema": [
            ("parameters", False),
            ("step_configuration", False),
            ("docstring", True),
        ],
    }
    for table_name, columns in columns_per_table.items():
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            for column_name, nullable in columns:
                batch_op.alter_column(
                    column_name,
                    existing_type=sa.VARCHAR(length=4096),
                    type_=sa.TEXT(),
                    existing_nullable=nullable,
                )

    # ### end Alembic commands ###
8a64fbfecda0_add_num_outputs_to_run_step

Add num_outputs to run step [8a64fbfecda0].

Revision ID: 8a64fbfecda0 Revises: 5330ba58bf20 Create Date: 2022-11-08 16:20:35.241562

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/8a64fbfecda0_add_num_outputs_to_run_step.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Remove the `num_outputs` column added by this revision.
    with op.batch_alter_table("step_run", schema=None) as batch_op:
        batch_op.drop_column("num_outputs")

    # Restore the NOT NULL constraint on `num_steps` that the upgrade
    # relaxed.
    with op.batch_alter_table("pipeline_run", schema=None) as batch_op:
        batch_op.alter_column(
            "num_steps", existing_type=sa.Integer(), nullable=False
        )

    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/8a64fbfecda0_add_num_outputs_to_run_step.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Relax the NOT NULL constraint on `num_steps` so existing rows without
    # a value remain valid.
    with op.batch_alter_table("pipeline_run", schema=None) as batch_op:
        batch_op.alter_column(
            "num_steps", existing_type=sa.Integer(), nullable=True
        )

    # Add the new nullable `num_outputs` column to step runs.
    with op.batch_alter_table("step_run", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("num_outputs", sa.Integer(), nullable=True)
        )

    # ### end Alembic commands ###
alembic_start

Initialize db with first revision.

Revision ID: alembic_start Revises: Create Date: 2022-10-19 11:17:54.753102

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/alembic_start.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop order matters: child tables are removed before the tables they
    # reference.
    tables_in_drop_order = (
        "stepinputartifactschema",
        "steprunorderschema",
        "artifactschema",
        "steprunschema",
        "stackcompositionschema",
        "pipelinerunschema",
        "userroleassignmentschema",
        "teamroleassignmentschema",
        "teamassignmentschema",
        "stackschema",
        "stackcomponentschema",
        "pipelineschema",
        "flavorschema",
        "userschema",
        "teamschema",
        "roleschema",
        "projectschema",
    )
    for table_name in tables_in_drop_order:
        op.drop_table(table_name)
    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/alembic_start.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Standalone tables first: later tables reference them via foreign keys,
    # so creation order matters throughout this function.
    op.create_table(
        "projectschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "description", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "roleschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "teamschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "userschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "full_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("email", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("active", sa.Boolean(), nullable=False),
        sa.Column(
            "password", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "activation_token",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=True,
        ),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.Column("email_opted_in", sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    # Resource tables scoped to a project and, optionally, a user. Deleting
    # a project cascades; deleting a user leaves the resource ownerless.
    op.create_table(
        "flavorschema",
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "integration", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "config_schema", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"], ["projectschema.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["user_id"], ["userschema.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "pipelineschema",
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "docstring",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=True,
        ),
        sa.Column(
            "spec",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=False,
        ),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"], ["projectschema.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["user_id"], ["userschema.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "stackcomponentschema",
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("is_shared", sa.Boolean(), nullable=False),
        sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("flavor", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("configuration", sa.LargeBinary(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"], ["projectschema.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["user_id"], ["userschema.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "stackschema",
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("is_shared", sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"], ["projectschema.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["user_id"], ["userschema.id"], ondelete="SET NULL"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Many-to-many link tables and role assignments.
    op.create_table(
        "teamassignmentschema",
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("team_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teamschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["userschema.id"],
        ),
        sa.PrimaryKeyConstraint("user_id", "team_id"),
    )
    op.create_table(
        "teamroleassignmentschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("role_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("team_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["roleschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["team_id"],
            ["teamschema.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "userroleassignmentschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("role_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["project_id"],
            ["projectschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["role_id"],
            ["roleschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["userschema.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Pipeline/step execution tracking tables.
    # NOTE(review): the user FK here uses ondelete="CASCADE" while the other
    # project-scoped tables above use "SET NULL" -- confirm this is intended.
    op.create_table(
        "pipelinerunschema",
        sa.Column("project_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("stack_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("pipeline_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "pipeline_configuration",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=False,
        ),
        sa.Column("num_steps", sa.Integer(), nullable=False),
        sa.Column(
            "zenml_version", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("git_sha", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.Column("mlmd_id", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["pipeline_id"], ["pipelineschema.id"], ondelete="SET NULL"
        ),
        sa.ForeignKeyConstraint(
            ["project_id"], ["projectschema.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(
            ["stack_id"], ["stackschema.id"], ondelete="SET NULL"
        ),
        sa.ForeignKeyConstraint(
            ["user_id"], ["userschema.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "stackcompositionschema",
        sa.Column("stack_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("component_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["component_id"],
            ["stackcomponentschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["stack_id"],
            ["stackschema.id"],
        ),
        sa.PrimaryKeyConstraint("stack_id", "component_id"),
    )
    op.create_table(
        "steprunschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "pipeline_run_id", sqlmodel.sql.sqltypes.GUID(), nullable=False
        ),
        sa.Column(
            "entrypoint_name",
            sqlmodel.sql.sqltypes.AutoString(),
            nullable=False,
        ),
        sa.Column(
            "parameters",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=False,
        ),
        sa.Column(
            "step_configuration",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=False,
        ),
        sa.Column(
            "docstring",
            sqlmodel.sql.sqltypes.AutoString(length=4096),
            nullable=True,
        ),
        sa.Column("mlmd_id", sa.Integer(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["pipeline_run_id"],
            ["pipelinerunschema.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "artifactschema",
        sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "parent_step_id", sqlmodel.sql.sqltypes.GUID(), nullable=False
        ),
        sa.Column(
            "producer_step_id", sqlmodel.sql.sqltypes.GUID(), nullable=False
        ),
        sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("uri", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column(
            "materializer", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "data_type", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("is_cached", sa.Boolean(), nullable=False),
        sa.Column("mlmd_id", sa.Integer(), nullable=True),
        sa.Column("mlmd_parent_step_id", sa.Integer(), nullable=True),
        sa.Column("mlmd_producer_step_id", sa.Integer(), nullable=True),
        sa.Column("created", sa.DateTime(), nullable=False),
        sa.Column("updated", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["parent_step_id"],
            ["steprunschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["producer_step_id"],
            ["steprunschema.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Step ordering (DAG edges) and step-input link tables.
    op.create_table(
        "steprunorderschema",
        sa.Column("parent_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("child_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["child_id"],
            ["steprunschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["parent_id"],
            ["steprunschema.id"],
        ),
        sa.PrimaryKeyConstraint("parent_id", "child_id"),
    )
    op.create_table(
        "stepinputartifactschema",
        sa.Column("step_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("artifact_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.ForeignKeyConstraint(
            ["artifact_id"],
            ["artifactschema.id"],
        ),
        sa.ForeignKeyConstraint(
            ["step_id"],
            ["steprunschema.id"],
        ),
        sa.PrimaryKeyConstraint("step_id", "artifact_id"),
    )
    # ### end Alembic commands ###
c1b18cec3a48_increase_length_on_flavor_config_schema

Increase length on flavor config schema [c1b18cec3a48].

Revision ID: c1b18cec3a48 Revises: alembic_start Create Date: 2022-10-19 17:12:19.481776

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/c1b18cec3a48_increase_length_on_flavor_config_schema.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Shrink config_schema back to an unbounded string and make it required.
    with op.batch_alter_table("flavorschema", schema=None) as batch:
        batch.alter_column(
            "config_schema",
            nullable=False,
            type_=sqlmodel.sql.sqltypes.AutoString(),
            existing_type=sa.VARCHAR(),
        )

    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/c1b18cec3a48_increase_length_on_flavor_config_schema.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Widen config_schema to 4096 characters and allow it to be NULL.
    with op.batch_alter_table("flavorschema", schema=None) as batch:
        batch.alter_column(
            "config_schema",
            nullable=True,
            type_=sqlmodel.sql.sqltypes.AutoString(4096),
            existing_type=sa.VARCHAR(),
        )

    # ### end Alembic commands ###
ccd68b7825ae_add_status_to_pipeline_and_step_run

Add status to pipeline and step run [ccd68b7825ae].

Revision ID: ccd68b7825ae Revises: c1b18cec3a48 Create Date: 2022-10-24 16:49:37.007641

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/ccd68b7825ae_add_status_to_pipeline_and_step_run.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # Remove the status column from both run tables.
    for table_name in ("steprunschema", "pipelinerunschema"):
        with op.batch_alter_table(table_name, schema=None) as batch:
            batch.drop_column("status")
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/ccd68b7825ae_add_status_to_pipeline_and_step_run.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""

    def _add_status(table: str, backfill_sql: str) -> None:
        # Add the column as nullable first so existing rows are accepted,
        # backfill every pre-existing row, then tighten to NOT NULL.
        with op.batch_alter_table(table, schema=None) as batch:
            batch.add_column(
                sa.Column(
                    "status", sqlmodel.sql.sqltypes.AutoString(), nullable=True
                ),
            )
        op.execute(backfill_sql)
        with op.batch_alter_table(table, schema=None) as batch:
            batch.alter_column(
                "status",
                nullable=False,
                existing_type=sqlmodel.sql.sqltypes.AutoString(),
            )

    _add_status(
        "pipelinerunschema", "UPDATE pipelinerunschema SET status = 'running'"
    )
    _add_status(
        "steprunschema", "UPDATE steprunschema SET status = 'running'"
    )
d02b3d3464cf_add_orchestrator_run_id_column

Add orchestrator_run_id column [d02b3d3464cf].

Revision ID: d02b3d3464cf Revises: ccd68b7825ae Create Date: 2022-10-26 16:50:44.965578

downgrade()

Downgrade database schema and/or data back to the previous revision.

Source code in zenml/zen_stores/migrations/versions/d02b3d3464cf_add_orchestrator_run_id_column.py
def downgrade() -> None:
    """Downgrade database schema and/or data back to the previous revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the column introduced by this revision.
    with op.batch_alter_table("pipelinerunschema", schema=None) as batch:
        batch.drop_column("orchestrator_run_id")

    # ### end Alembic commands ###
upgrade()

Upgrade database schema and/or data, creating a new revision.

Source code in zenml/zen_stores/migrations/versions/d02b3d3464cf_add_orchestrator_run_id_column.py
def upgrade() -> None:
    """Upgrade database schema and/or data, creating a new revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Add an optional orchestrator_run_id string column to pipeline runs.
    new_column = sa.Column(
        "orchestrator_run_id",
        sqlmodel.sql.sqltypes.AutoString(),
        nullable=True,
    )
    with op.batch_alter_table("pipelinerunschema", schema=None) as batch:
        batch.add_column(new_column)

    # ### end Alembic commands ###

rest_zen_store

REST Zen Store implementation.

RestZenStore (BaseZenStore) pydantic-model

Store implementation for accessing data from a REST API.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStore(BaseZenStore):
    """Store implementation for accessing data from a REST API."""

    # Configuration of this REST store (narrowed from the base class type).
    config: RestZenStoreConfiguration
    TYPE: ClassVar[StoreType] = StoreType.REST
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]] = RestZenStoreConfiguration
    # API token and HTTP session start unset; presumably populated lazily on
    # the first authenticated request -- TODO confirm against the accessors.
    _api_token: Optional[str] = None
    _session: Optional[requests.Session] = None

    def _initialize_database(self) -> None:
        """Initialize the database.

        This is a deliberate no-op for a REST store: there is no local
        database to set up.
        """
        # don't do anything for a REST store

    # ====================================
    # ZenML Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the REST store.

        Fetches the server info and warns (unless suppressed) when the
        client and server ZenML versions differ.
        """
        ours = zenml.__version__
        theirs = self.get_store_info().version

        versions_differ = theirs != ours
        if versions_differ and not DISABLE_CLIENT_SERVER_MISMATCH_WARNING:
            logger.warning(
                "Your ZenML client version (%s) does not match the server "
                "version (%s). This version mismatch might lead to errors or "
                "unexpected behavior. \nTo disable this warning message, set "
                "the environment variable `%s=True`",
                ours,
                theirs,
                ENV_ZENML_DISABLE_CLIENT_SERVER_MISMATCH_WARNING,
            )

    def get_store_info(self) -> ServerModel:
        """Get information about the server.

        Returns:
            Information about the server.
        """
        # Fetch the raw payload from the info endpoint and validate it.
        return ServerModel.parse_obj(self.get(INFO))

    # ------------
    # TFX Metadata
    # ------------

    def get_metadata_config(
        self, expand_certs: bool = False
    ) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
        """Get the TFX metadata config of this ZenStore.

        Args:
            expand_certs: Whether to expand the certificate paths in the
                connection config to their value.

        Raises:
            ValueError: if the server response is invalid.

        Returns:
            The TFX metadata config of this ZenStore.
        """
        from google.protobuf.json_format import Parse, ParseError
        from ml_metadata.proto.metadata_store_pb2 import (
            ConnectionConfig,
            MetadataStoreClientConfig,
        )

        from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

        body = self.get(METADATA_CONFIG)
        if not isinstance(body, str):
            raise ValueError(
                f"Invalid response from server: {body}. Expected string."
            )

        # First try to parse the response as a ConnectionConfig, then as a
        # MetadataStoreClientConfig.
        try:
            metadata_config_pb = Parse(body, ConnectionConfig())
        except ParseError:
            return Parse(body, MetadataStoreClientConfig())

        # if the server returns a SQLite connection config, but the file is not
        # available locally, we need to replace the path with the local path of
        # the default local SQLite database
        if metadata_config_pb.HasField("sqlite") and not os.path.isfile(
            metadata_config_pb.sqlite.filename_uri
        ):
            message = (
                f"The ZenML server is using a SQLite database at "
                f"{metadata_config_pb.sqlite.filename_uri} that is not "
                f"available locally. Using the default local SQLite "
                f"database instead."
            )
            if not self.is_local_store():
                logger.warning(message)
            else:
                logger.debug(message)
            default_store_cfg = GlobalConfiguration().get_default_store()
            assert isinstance(default_store_cfg, SqlZenStoreConfiguration)
            return default_store_cfg.get_metadata_config()

        if metadata_config_pb.HasField("mysql"):
            # If the server returns a MySQL connection config with a hostname
            # that is a Docker or K3D internal hostname that cannot be resolved
            # locally, we need to replace it with localhost. We're assuming
            # that we're running on the host machine and the MySQL server can
            # be accessed via localhost.
            metadata_config_pb.mysql.host = (
                replace_internal_hostname_with_localhost(
                    metadata_config_pb.mysql.host
                )
            )

            if not expand_certs and metadata_config_pb.mysql.HasField(
                "ssl_options"
            ):
                # Save the certificates in a secure location on disk
                secret_folder = Path(
                    GlobalConfiguration().local_stores_path,
                    "certificates",
                )
                for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                    # Strip the "ssl_" prefix by slicing. NOTE: the previous
                    # `key.lstrip("ssl_")` was incorrect in general: lstrip
                    # removes a *character set* (s, l, _), which only yielded
                    # the right field names here by accident.
                    field = key[len("ssl_") :]
                    if not metadata_config_pb.mysql.ssl_options.HasField(
                        field
                    ):
                        continue
                    content = getattr(
                        metadata_config_pb.mysql.ssl_options, field
                    )
                    if content and not os.path.isfile(content):
                        fileio.makedirs(str(secret_folder))
                        file_path = Path(secret_folder, f"{key}.pem")
                        with open(file_path, "w") as f:
                            f.write(content)
                        # Certificate material is sensitive: owner-only access.
                        file_path.chmod(0o600)
                        setattr(
                            metadata_config_pb.mysql.ssl_options,
                            field,
                            str(file_path),
                        )

        return metadata_config_pb

    # ------
    # Stacks
    # ------

    @track(AnalyticsEvent.REGISTERED_STACK)
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Register a new stack with the server.

        Args:
            stack: The stack to register.

        Returns:
            The registered stack.
        """
        created = self._create_project_scoped_resource(
            response_model=StackResponseModel,
            route=STACKS,
            resource=stack,
        )
        return created

    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Fetch a single stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.
        """
        stack = self._get_resource(
            response_model=StackResponseModel,
            route=STACKS,
            resource_id=stack_id,
        )
        return stack

    def list_stacks(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_id: Optional[UUID] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[StackResponseModel]:
        """List all stacks matching the given filter criteria.

        Args:
            project_name_or_id: ID or name of the project containing the stack.
            user_name_or_id: Optionally filter stacks by their owner.
            component_id: Optionally filter for stacks containing the component.
            name: Optionally filter stacks by their name.
            is_shared: Optionally filter stacks by whether they are shared.

        Returns:
            A list of all stacks matching the filter criteria.
        """
        # Pass the filters explicitly rather than harvesting locals().
        return self._list_resources(
            route=STACKS,
            response_model=StackResponseModel,
            project_name_or_id=project_name_or_id,
            user_name_or_id=user_name_or_id,
            component_id=component_id,
            name=name,
            is_shared=is_shared,
        )

    @track(AnalyticsEvent.UPDATED_STACK)
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Apply an update to an existing stack.

        Args:
            stack_id: The ID of the stack to update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.
        """
        return self._update_resource(
            response_model=StackResponseModel,
            route=STACKS,
            resource_id=stack_id,
            resource_update=stack_update,
        )

    @track(AnalyticsEvent.DELETED_STACK)
    def delete_stack(self, stack_id: UUID) -> None:
        """Remove a stack from the server.

        Args:
            stack_id: The ID of the stack to delete.
        """
        self._delete_resource(route=STACKS, resource_id=stack_id)

    # ----------------
    # Stack components
    # ----------------

    @track(AnalyticsEvent.REGISTERED_STACK_COMPONENT)
    def create_stack_component(
        self,
        component: ComponentRequestModel,
    ) -> ComponentResponseModel:
        """Register a new stack component with the server.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.
        """
        created = self._create_project_scoped_resource(
            response_model=ComponentResponseModel,
            route=STACK_COMPONENTS,
            resource=component,
        )
        return created

    def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
        """Fetch a single stack component by its ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.
        """
        component = self._get_resource(
            response_model=ComponentResponseModel,
            route=STACK_COMPONENTS,
            resource_id=component_id,
        )
        return component

    def list_stack_components(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        type: Optional[str] = None,
        flavor_name: Optional[str] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[ComponentResponseModel]:
        """List all stack components matching the given filter criteria.

        Args:
            project_name_or_id: ID or name of the project to which the stack
                components belong.
            user_name_or_id: Optionally filter stack components by the owner.
            type: Optionally filter by type of stack component.
            flavor_name: Optionally filter by flavor.
            name: Optionally filter stack components by name.
            is_shared: Optionally filter stack components by whether they are
                shared.

        Returns:
            A list of all stack components matching the filter criteria.
        """
        # Pass the filters explicitly rather than harvesting locals().
        return self._list_resources(
            route=STACK_COMPONENTS,
            response_model=ComponentResponseModel,
            project_name_or_id=project_name_or_id,
            user_name_or_id=user_name_or_id,
            type=type,
            flavor_name=flavor_name,
            name=name,
            is_shared=is_shared,
        )

    @track(AnalyticsEvent.UPDATED_STACK_COMPONENT)
    def update_stack_component(
        self,
        component_id: UUID,
        component_update: ComponentUpdateModel,
    ) -> ComponentResponseModel:
        """Apply an update to an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.
        """
        return self._update_resource(
            response_model=ComponentResponseModel,
            route=STACK_COMPONENTS,
            resource_id=component_id,
            resource_update=component_update,
        )

    @track(AnalyticsEvent.DELETED_STACK_COMPONENT)
    def delete_stack_component(self, component_id: UUID) -> None:
        """Remove a stack component from the server.

        Args:
            component_id: The ID of the stack component to delete.
        """
        self._delete_resource(
            route=STACK_COMPONENTS, resource_id=component_id
        )

    # -----------------------
    # Stack component flavors
    # -----------------------

    @track(AnalyticsEvent.CREATED_FLAVOR)
    def create_flavor(self, flavor: FlavorRequestModel) -> FlavorResponseModel:
        """Register a new stack component flavor with the server.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.
        """
        created = self._create_project_scoped_resource(
            response_model=FlavorResponseModel,
            route=FLAVORS,
            resource=flavor,
        )
        return created

    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Fetch a single stack component flavor by its ID.

        Args:
            flavor_id: The ID of the stack component flavor to get.

        Returns:
            The stack component flavor.
        """
        flavor = self._get_resource(
            response_model=FlavorResponseModel,
            route=FLAVORS,
            resource_id=flavor_id,
        )
        return flavor

    def list_flavors(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_type: Optional[StackComponentType] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[FlavorResponseModel]:
        """List stack component flavors, optionally filtered.

        Args:
            project_name_or_id: Optionally filter by the Project to which the
                component flavors belong
            user_name_or_id: Optionally filter by the owner
            component_type: Optionally filter by type of stack component
            name: Optionally filter flavors by name
            is_shared: Optionally filter out flavors by whether they are
                shared or not

        Returns:
            List of all the stack component flavors matching the given criteria.
        """
        # Forward every filter argument explicitly to the generic list helper.
        filters = {
            "project_name_or_id": project_name_or_id,
            "user_name_or_id": user_name_or_id,
            "component_type": component_type,
            "name": name,
            "is_shared": is_shared,
        }
        return self._list_resources(
            route=FLAVORS,
            response_model=FlavorResponseModel,
            **filters,
        )

    @track(AnalyticsEvent.DELETED_FLAVOR)
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Remove a stack component flavor from the store.

        Args:
            flavor_id: The ID of the stack component flavor to delete.
        """
        self._delete_resource(
            route=FLAVORS,
            resource_id=flavor_id,
        )

    # -----
    # Users
    # -----

    @property
    def active_user_name(self) -> str:
        """The username this store is configured to authenticate with.

        Returns:
            The configured username.
        """
        return self.config.username

    @track(AnalyticsEvent.CREATED_USER)
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Register a new user with the server.

        Args:
            user: User to be created.

        Returns:
            The user as registered in the store.
        """
        # The query string suppresses the server-side default role assignment.
        return self._create_resource(
            resource=user,
            route=f"{USERS}?assign_default_role=False",
            response_model=UserResponseModel,
        )

    def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
        """Fetch a single user by name or ID.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.
        """
        return self._get_resource(
            route=USERS,
            resource_id=user_name_or_id,
            response_model=UserResponseModel,
        )

    def get_auth_user(
        self, user_name_or_id: Union[str, UUID]
    ) -> "UserAuthModel":
        """Fetch the auth model of a user (server-side only).

        Args:
            user_name_or_id: The name or ID of the user to get.

        Raises:
            NotImplementedError: This method is only available for the
                SQLZenStore.
        """
        # Auth models carry credentials and are never exposed to clients.
        raise NotImplementedError(
            "This method is only designed for use"
            " by the server endpoints. It is not designed"
            " to be called from the client side."
        )

    def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
        """List users, optionally filtered by name.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all users.
        """
        return self._list_resources(
            route=USERS,
            response_model=UserResponseModel,
            name=name,
        )

    @track(AnalyticsEvent.UPDATED_USER)
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Apply an update to an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The user after the update was applied.
        """
        return self._update_resource(
            resource_id=user_id,
            resource_update=user_update,
            response_model=UserResponseModel,
            route=USERS,
        )

    @track(AnalyticsEvent.DELETED_USER)
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Remove a user from the store.

        Args:
            user_name_or_id: The name or ID of the user to delete.
        """
        self._delete_resource(
            route=USERS,
            resource_id=user_name_or_id,
        )

    # -----
    # Teams
    # -----

    @track(AnalyticsEvent.CREATED_TEAM)
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Register a new team with the server.

        Args:
            team: The team model to create.

        Returns:
            The team as registered in the store.
        """
        return self._create_resource(
            resource=team,
            response_model=TeamResponseModel,
            route=TEAMS,
        )

    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Fetch a single team by name or ID.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.
        """
        return self._get_resource(
            route=TEAMS,
            resource_id=team_name_or_id,
            response_model=TeamResponseModel,
        )

    def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
        """List teams, optionally filtered by name.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all teams.
        """
        return self._list_resources(
            route=TEAMS,
            response_model=TeamResponseModel,
            name=name,
        )

    @track(AnalyticsEvent.UPDATED_TEAM)
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Apply an update to an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The team after the update was applied.
        """
        return self._update_resource(
            resource_id=team_id,
            resource_update=team_update,
            response_model=TeamResponseModel,
            route=TEAMS,
        )

    @track(AnalyticsEvent.DELETED_TEAM)
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Remove a team from the store.

        Args:
            team_name_or_id: Name or ID of the team to delete.
        """
        self._delete_resource(
            route=TEAMS,
            resource_id=team_name_or_id,
        )

    # -----
    # Roles
    # -----

    @track(AnalyticsEvent.CREATED_ROLE)
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Register a new role with the server.

        Args:
            role: The role model to create.

        Returns:
            The role as registered in the store.
        """
        return self._create_resource(
            resource=role,
            response_model=RoleResponseModel,
            route=ROLES,
        )

    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Fetch a single role by name or ID.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.
        """
        return self._get_resource(
            route=ROLES,
            resource_id=role_name_or_id,
            response_model=RoleResponseModel,
        )

    def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
        """List roles, optionally filtered by name.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all roles.
        """
        return self._list_resources(
            route=ROLES,
            response_model=RoleResponseModel,
            name=name,
        )

    @track(AnalyticsEvent.UPDATED_ROLE)
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Apply an update to an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The role after the update was applied.
        """
        return self._update_resource(
            resource_id=role_id,
            resource_update=role_update,
            response_model=RoleResponseModel,
            route=ROLES,
        )

    @track(AnalyticsEvent.DELETED_ROLE)
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Remove a role from the store.

        Args:
            role_name_or_id: Name or ID of the role to delete.
        """
        self._delete_resource(
            route=ROLES,
            resource_id=role_name_or_id,
        )

    # ----------------
    # Role assignments
    # ----------------

    def list_role_assignments(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        role_name_or_id: Optional[Union[str, UUID]] = None,
        team_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> List[RoleAssignmentResponseModel]:
        """List all role assignments.

        Args:
            project_name_or_id: If provided, only list assignments for the given
                project
            role_name_or_id: If provided, only list assignments of the given
                role
            team_name_or_id: If provided, only list assignments for the given
                team
            user_name_or_id: If provided, only list assignments for the given
                user

        Returns:
            A list of all role assignments.
        """
        # Fixed: the route was previously wrapped in a pointless f-string
        # (`f"{ROLE_ASSIGNMENTS}"`); pass the constant directly like every
        # other list method in this class.
        return self._list_resources(
            route=ROLE_ASSIGNMENTS,
            response_model=RoleAssignmentResponseModel,
            project_name_or_id=project_name_or_id,
            role_name_or_id=role_name_or_id,
            team_name_or_id=team_name_or_id,
            user_name_or_id=user_name_or_id,
        )

    def get_role_assignment(
        self, role_assignment_id: UUID
    ) -> RoleAssignmentResponseModel:
        """Get an existing role assignment by ID.

        Args:
            role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.
        """
        # Fixed: the docstring previously claimed this returns "the requested
        # project"; it returns a role assignment.
        return self._get_resource(
            resource_id=role_assignment_id,
            route=ROLE_ASSIGNMENTS,
            response_model=RoleAssignmentResponseModel,
        )

    def delete_role_assignment(self, role_assignment_id: UUID) -> None:
        """Remove a specific role assignment from the store.

        Args:
            role_assignment_id: The ID of the specific role assignment
        """
        self._delete_resource(
            route=ROLE_ASSIGNMENTS,
            resource_id=role_assignment_id,
        )

    def create_role_assignment(
        self, role_assignment: RoleAssignmentRequestModel
    ) -> RoleAssignmentResponseModel:
        """Creates a new role assignment.

        Args:
            role_assignment: The role assignment to create.

        Returns:
            The newly created role assignment.
        """
        # Fixed: the docstring previously claimed this returns "the newly
        # created project"; it returns a role assignment.
        return self._create_resource(
            resource=role_assignment,
            route=ROLE_ASSIGNMENTS,
            response_model=RoleAssignmentResponseModel,
        )

    # --------
    # Projects
    # --------

    @track(AnalyticsEvent.CREATED_PROJECT)
    def create_project(
        self, project: ProjectRequestModel
    ) -> ProjectResponseModel:
        """Register a new project with the server.

        Args:
            project: The project to create.

        Returns:
            The project as registered in the store.
        """
        return self._create_resource(
            resource=project,
            response_model=ProjectResponseModel,
            route=PROJECTS,
        )

    def get_project(
        self, project_name_or_id: Union[UUID, str]
    ) -> ProjectResponseModel:
        """Fetch a single project by name or ID.

        Args:
            project_name_or_id: Name or ID of the project to get.

        Returns:
            The requested project.
        """
        return self._get_resource(
            route=PROJECTS,
            resource_id=project_name_or_id,
            response_model=ProjectResponseModel,
        )

    def list_projects(
        self, name: Optional[str] = None
    ) -> List[ProjectResponseModel]:
        """List projects, optionally filtered by name.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all projects.
        """
        return self._list_resources(
            route=PROJECTS,
            response_model=ProjectResponseModel,
            name=name,
        )

    @track(AnalyticsEvent.UPDATED_PROJECT)
    def update_project(
        self, project_id: UUID, project_update: ProjectUpdateModel
    ) -> ProjectResponseModel:
        """Apply an update to an existing project.

        Args:
            project_id: The ID of the project to be updated.
            project_update: The update to be applied to the project.

        Returns:
            The project after the update was applied.
        """
        return self._update_resource(
            resource_id=project_id,
            resource_update=project_update,
            response_model=ProjectResponseModel,
            route=PROJECTS,
        )

    @track(AnalyticsEvent.DELETED_PROJECT)
    def delete_project(self, project_name_or_id: Union[str, UUID]) -> None:
        """Remove a project from the store.

        Args:
            project_name_or_id: Name or ID of the project to delete.
        """
        self._delete_resource(
            route=PROJECTS,
            resource_id=project_name_or_id,
        )

    # ---------
    # Pipelines
    # ---------

    @track(AnalyticsEvent.CREATE_PIPELINE)
    def create_pipeline(
        self, pipeline: PipelineRequestModel
    ) -> PipelineResponseModel:
        """Register a new pipeline in a project.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The pipeline as registered in the store.
        """
        return self._create_project_scoped_resource(
            resource=pipeline,
            response_model=PipelineResponseModel,
            route=PIPELINES,
        )

    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Fetch a pipeline by its ID.

        Args:
            pipeline_id: ID of the pipeline.

        Returns:
            The requested pipeline.
        """
        return self._get_resource(
            route=PIPELINES,
            resource_id=pipeline_id,
            response_model=PipelineResponseModel,
        )

    def list_pipelines(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        name: Optional[str] = None,
    ) -> List[PipelineResponseModel]:
        """List pipelines, optionally filtered.

        Args:
            project_name_or_id: If provided, only list pipelines in this
                project.
            user_name_or_id: If provided, only list pipelines from this user.
            name: If provided, only list pipelines with this name.

        Returns:
            A list of pipelines.
        """
        # Forward every filter argument explicitly to the generic list helper.
        filters = {
            "project_name_or_id": project_name_or_id,
            "user_name_or_id": user_name_or_id,
            "name": name,
        }
        return self._list_resources(
            route=PIPELINES,
            response_model=PipelineResponseModel,
            **filters,
        )

    @track(AnalyticsEvent.UPDATE_PIPELINE)
    def update_pipeline(
        self, pipeline_id: UUID, pipeline_update: PipelineUpdateModel
    ) -> PipelineResponseModel:
        """Apply an update to an existing pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The pipeline after the update was applied.
        """
        return self._update_resource(
            resource_id=pipeline_id,
            resource_update=pipeline_update,
            response_model=PipelineResponseModel,
            route=PIPELINES,
        )

    @track(AnalyticsEvent.DELETE_PIPELINE)
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Remove a pipeline from the store.

        Args:
            pipeline_id: The ID of the pipeline to delete.
        """
        self._delete_resource(
            route=PIPELINES,
            resource_id=pipeline_id,
        )

    # --------------
    # Pipeline runs
    # --------------

    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Register a new pipeline run.

        Args:
            pipeline_run: The pipeline run to create.

        Returns:
            The pipeline run as registered in the store.
        """
        return self._create_project_scoped_resource(
            resource=pipeline_run,
            route=RUNS,
            response_model=PipelineRunResponseModel,
        )

    def get_run(
        self, run_name_or_id: Union[UUID, str]
    ) -> PipelineRunResponseModel:
        """Fetch a pipeline run by name or ID.

        Args:
            run_name_or_id: The name or ID of the pipeline run to get.

        Returns:
            The requested pipeline run.
        """
        # Make sure locally tracked runs are pushed to the server first.
        self._sync_runs()
        return self._get_resource(
            route=RUNS,
            resource_id=run_name_or_id,
            response_model=PipelineRunResponseModel,
        )

    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Fetch an existing pipeline run or create it if missing.

        If a run with the same ID or name already exists, it is returned.
        Otherwise, a new run is created.

        Args:
            pipeline_run: The pipeline run to get or create.

        Returns:
            The existing or newly created pipeline run.
        """
        # The query parameter tells the server to return an existing run
        # instead of failing with a conflict.
        return self._create_project_scoped_resource(
            resource=pipeline_run,
            route=RUNS,
            response_model=PipelineRunResponseModel,
            params={"get_if_exists": True},
        )

    def list_runs(
        self,
        name: Optional[str] = None,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        stack_id: Optional[UUID] = None,
        component_id: Optional[UUID] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        pipeline_id: Optional[UUID] = None,
        unlisted: bool = False,
    ) -> List[PipelineRunResponseModel]:
        """List pipeline runs, optionally filtered.

        Args:
            name: Run name if provided
            project_name_or_id: If provided, only return runs for this project.
            stack_id: If provided, only return runs for this stack.
            component_id: Optionally filter for runs that used the
                          component
            user_name_or_id: If provided, only return runs for this user.
            pipeline_id: If provided, only return runs for this pipeline.
            unlisted: If True, only return unlisted runs that are not
                associated with any pipeline (filter by `pipeline_id==None`).

        Returns:
            A list of all pipeline runs.
        """
        # Make sure locally tracked runs are pushed to the server first.
        self._sync_runs()
        filters = {
            "name": name,
            "project_name_or_id": project_name_or_id,
            "stack_id": stack_id,
            "component_id": component_id,
            "user_name_or_id": user_name_or_id,
            "pipeline_id": pipeline_id,
            "unlisted": unlisted,
        }
        return self._list_resources(
            route=RUNS,
            response_model=PipelineRunResponseModel,
            **filters,
        )

    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Updates a pipeline run.

        Args:
            run_id: The ID of the pipeline run to update.
            run_update: The update to be applied to the pipeline run.

        Returns:
            The updated pipeline run.
        """
        # Fixed: removed a stray double blank line inside the docstring.
        return self._update_resource(
            resource_id=run_id,
            resource_update=run_update,
            response_model=PipelineRunResponseModel,
            route=RUNS,
        )

    # ------------------
    # Pipeline run steps
    # ------------------

    def create_run_step(
        self, step: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Register a new step run.

        Args:
            step: The step to create.

        Returns:
            The step as registered in the store.
        """
        return self._create_resource(
            resource=step,
            route=STEPS,
            response_model=StepRunResponseModel,
        )

    def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
        """Fetch a step run by its ID.

        Args:
            step_id: The ID of the step to get.

        Returns:
            The requested step.
        """
        # Make sure locally tracked runs are pushed to the server first.
        self._sync_runs()
        return self._get_resource(
            route=STEPS,
            resource_id=step_id,
            response_model=StepRunResponseModel,
        )

    def list_run_steps(
        self, run_id: Optional[UUID] = None
    ) -> List[StepRunResponseModel]:
        """Get all run steps.

        Args:
            run_id: If provided, only return steps for this pipeline run.

        Returns:
            A list of all run steps.
        """
        self._sync_runs()
        # Fixed: a spurious `resource_model=StepRunResponseModel` keyword was
        # previously passed alongside `response_model`. `_list_resources` only
        # takes `route` and `response_model` as named parameters, so the extra
        # keyword fell into `**filters` and was forwarded to the API as a bogus
        # query parameter. No other list method passes it.
        return self._list_resources(
            route=STEPS,
            response_model=StepRunResponseModel,
            run_id=run_id,
        )

    def update_run_step(
        self,
        step_id: UUID,
        step_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Apply an update to an existing step run.

        Args:
            step_id: The ID of the step to update.
            step_update: The update to be applied to the step.

        Returns:
            The step after the update was applied.
        """
        return self._update_resource(
            resource_id=step_id,
            resource_update=step_update,
            route=STEPS,
            response_model=StepRunResponseModel,
        )

    def get_run_step_inputs(
        self, step_id: UUID
    ) -> Dict[str, ArtifactResponseModel]:
        """Fetch the input artifacts of a step, keyed by input name.

        Args:
            step_id: The id of the step to get inputs for.

        Returns:
            A dict mapping artifact names to the input artifacts for the step.

        Raises:
            ValueError: if the response from the API is not a dict.
        """
        body = self.get(f"{STEPS}/{str(step_id)}{INPUTS}")
        if not isinstance(body, dict):
            raise ValueError(
                f"Bad API Response. Expected dict, got {type(body)}"
            )
        inputs = {}
        for input_name, payload in body.items():
            inputs[input_name] = ArtifactResponseModel.parse_obj(payload)
        return inputs

    # ---------
    # Artifacts
    # ---------

    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Register a new artifact.

        Args:
            artifact: The artifact to create.

        Returns:
            The artifact as registered in the store.
        """
        return self._create_resource(
            resource=artifact,
            route=ARTIFACTS,
            response_model=ArtifactResponseModel,
        )

    def list_artifacts(
        self,
        artifact_uri: Optional[str] = None,
        parent_step_id: Optional[UUID] = None,
    ) -> List[ArtifactResponseModel]:
        """Lists all artifacts.

        Args:
            artifact_uri: If specified, only artifacts with the given URI will
                be returned.
            parent_step_id: If specified, only artifacts for the given step run
                will be returned.

        Returns:
            A list of all artifacts.
        """
        self._sync_runs()
        # Fixed: a spurious `resource_model=ArtifactResponseModel` keyword was
        # previously passed alongside `response_model`. It fell into the
        # `**filters` of `_list_resources` and was forwarded to the API as a
        # bogus query parameter. No other list method passes it.
        return self._list_resources(
            route=ARTIFACTS,
            response_model=ArtifactResponseModel,
            artifact_uri=artifact_uri,
            parent_step_id=parent_step_id,
        )

    # =======================
    # Internal helper methods
    # =======================

    def _get_auth_token(self) -> str:
        """Fetch (and cache) the token used to authenticate API calls.

        The token is requested from the server's login endpoint once and
        memoized on the instance.

        Returns:
            The authentication token.

        Raises:
            ValueError: if the response from the server isn't in the right
                format.
        """
        # Reuse the cached token if we already logged in.
        if self._api_token is not None:
            return self._api_token

        credentials = {
            "username": self.config.username,
            "password": self.config.password,
        }
        response = self._handle_response(
            requests.post(
                self.url + API + VERSION_1 + LOGIN,
                data=credentials,
                verify=self.config.verify_ssl,
                timeout=self.config.http_timeout,
            )
        )
        if not isinstance(response, dict) or "access_token" not in response:
            raise ValueError(
                f"Bad API Response. Expected access token dict, got "
                f"{type(response)}"
            )
        self._api_token = response["access_token"]
        return self._api_token

    @property
    def session(self) -> requests.Session:
        """Lazily build an authenticated HTTP session to the ZenML server.

        Returns:
            A requests session with the authentication token.
        """
        if self._session is None:
            # Suppress the per-request warnings that urllib3 emits when SSL
            # verification is explicitly disabled.
            if self.config.verify_ssl is False:
                urllib3.disable_warnings(
                    urllib3.exceptions.InsecureRequestWarning
                )

            self._session = requests.Session()
            self._session.verify = self.config.verify_ssl
            auth_header = "Bearer " + self._get_auth_token()
            self._session.headers.update({"Authorization": auth_header})
            logger.debug("Authenticated to ZenML server.")
        return self._session

    @staticmethod
    def _handle_response(response: requests.Response) -> Json:
        """Handle an API response, translating HTTP status codes to exceptions.

        Success (2xx) responses are parsed as JSON and returned; every error
        status code is mapped to a specific exception type, reconstructing
        server-side exception classes from markers in the response body.

        Args:
            response: The response to handle.

        Returns:
            The parsed response.

        Raises:
            DoesNotExistException: If the response indicates that the
                requested entity does not exist.
            EntityExistsError: If the response indicates that the requested
                entity already exists.
            AuthorizationException: If the response indicates that the request
                is not authorized.
            IllegalOperationError: If the response indicates that the requested
                operation is forbidden.
            KeyError: If the response indicates that the requested entity
                does not exist.
            RuntimeError: If the response indicates that the requested entity
                does not exist.
            StackComponentExistsError: If the response indicates that the
                requested entity already exists.
            StackExistsError: If the response indicates that the requested
                entity already exists.
            ValueError: If the response indicates that the requested entity
                does not exist.
        """
        if 200 <= response.status_code < 300:
            try:
                payload: Json = response.json()
                return payload
            except requests.exceptions.JSONDecodeError:
                # 2xx with a non-JSON body is a malformed server response.
                raise ValueError(
                    "Bad response from API. Expected json, got\n"
                    f"{response.text}"
                )
        elif response.status_code == 401:
            raise AuthorizationException(
                f"{response.status_code} Client Error: Unauthorized request to "
                f"URL {response.url}: {response.json().get('detail')}"
            )
        elif response.status_code == 403:
            msg = response.json().get("detail", response.text)
            if isinstance(msg, list):
                # The server may report the detail as a [class, message] pair;
                # keep only the human-readable message part.
                msg = msg[-1]
            raise IllegalOperationError(msg)
        elif response.status_code == 404:
            # The server encodes the original exception class name in the
            # body; re-raise the matching client-side exception.
            if "KeyError" in response.text:
                # NOTE(review): `detail` is expected to be a [class, message]
                # pair here; if `detail` is missing, the fallback
                # `(response.text,)[1]` would itself raise IndexError —
                # presumably the server always sends `detail` in this case.
                raise KeyError(
                    response.json().get("detail", (response.text,))[1]
                )
            elif "DoesNotExistException" in response.text:
                message = ": ".join(
                    response.json().get("detail", (response.text,))
                )
                raise DoesNotExistException(message)
            raise DoesNotExistException("Endpoint does not exist.")
        elif response.status_code == 409:
            # Conflict: map to the specific "already exists" exception the
            # server raised, falling back to a generic ValueError.
            if "StackComponentExistsError" in response.text:
                raise StackComponentExistsError(
                    message=": ".join(
                        response.json().get("detail", (response.text,))
                    )
                )
            elif "StackExistsError" in response.text:
                raise StackExistsError(
                    message=": ".join(
                        response.json().get("detail", (response.text,))
                    )
                )
            elif "EntityExistsError" in response.text:
                raise EntityExistsError(
                    message=": ".join(
                        response.json().get("detail", (response.text,))
                    )
                )
            else:
                raise ValueError(
                    ": ".join(response.json().get("detail", (response.text,)))
                )
        elif response.status_code == 422:
            # Unprocessable entity (e.g. validation failure on the server).
            raise RuntimeError(
                ": ".join(response.json().get("detail", (response.text,)))
            )
        elif response.status_code == 500:
            # Internal server error: surface the raw body.
            raise RuntimeError(response.text)
        else:
            raise RuntimeError(
                "Error retrieving from API. Got response "
                f"{response.status_code} with body:\n{response.text}"
            )

    def _request(
        self,
        method: str,
        url: str,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Issue a request to the REST API, retrying once on auth failure.

        Args:
            method: The HTTP method to use.
            url: The URL to request.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The parsed response.
        """
        # Query parameter values must be sent as strings.
        query = {key: str(value) for key, value in params.items()} if params else {}

        def _send() -> Json:
            # `self.session` re-authenticates lazily if the session was reset.
            return self._handle_response(
                self.session.request(
                    method,
                    url,
                    params=query,
                    verify=self.config.verify_ssl,
                    timeout=self.config.http_timeout,
                    **kwargs,
                )
            )

        try:
            return _send()
        except AuthorizationException:
            # The authentication token could have expired; drop the session to
            # force a fresh login and retry once.
            self._session = None
            return _send()

    def get(
        self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Json:
        """Issue a GET request against the given endpoint path.

        Args:
            path: The path to the endpoint.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending GET request to {path}...")
        endpoint = self.url + API + VERSION_1 + path
        return self._request("GET", endpoint, params=params, **kwargs)

    def delete(
        self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
    ) -> Json:
        """Issue a DELETE request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending DELETE request to {path}...")
        endpoint = self.url + API + VERSION_1 + path
        return self._request("DELETE", endpoint, params=params, **kwargs)

    def post(
        self,
        path: str,
        body: BaseModel,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Issue a POST request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            body: The body to send, serialized to JSON in full.
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending POST request to {path}...")
        endpoint = self.url + API + VERSION_1 + path
        return self._request(
            "POST", endpoint, data=body.json(), params=params, **kwargs
        )

    def put(
        self,
        path: str,
        body: BaseModel,
        params: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Json:
        """Issue a PUT request to the given endpoint path.

        Args:
            path: The path to the endpoint.
            body: The body to send; only explicitly set fields are
                serialized (`exclude_unset=True`).
            params: The query parameters to pass to the endpoint.
            kwargs: Additional keyword arguments to pass to the request.

        Returns:
            The response body.
        """
        logger.debug(f"Sending PUT request to {path}...")
        endpoint = self.url + API + VERSION_1 + path
        return self._request(
            "PUT",
            endpoint,
            data=body.json(exclude_unset=True),
            params=params,
            **kwargs,
        )

    def _create_resource(
        self,
        resource: BaseRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyResponseModel:
        """Create a new resource via a POST request.

        Args:
            resource: The resource to create.
            response_model: Model used to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource.
        """
        return response_model.parse_obj(
            self.post(f"{route}", body=resource, params=params)
        )

    def _create_project_scoped_resource(
        self,
        resource: ProjectScopedRequestModel,
        response_model: Type[AnyProjestResponseModel],
        route: str,
        params: Optional[Dict[str, Any]] = None,
    ) -> AnyProjestResponseModel:
        """Create a new resource scoped to the resource's project.

        Args:
            resource: The resource to create.
            response_model: Model used to deserialize the response body.
            route: The resource REST API route to use.
            params: Optional query parameters to pass to the endpoint.

        Returns:
            The created resource.
        """
        # Nest the route under the project that owns the resource.
        scoped_route = f"{PROJECTS}/{resource.project}{route}"
        return self._create_resource(
            resource=resource,
            response_model=response_model,
            route=scoped_route,
            params=params,
        )

    def _get_resource(
        self,
        resource_id: Union[str, UUID],
        route: str,
        response_model: Type[AnyResponseModel],
    ) -> AnyResponseModel:
        """Fetch a single resource by its ID.

        Args:
            resource_id: The ID of the resource to retrieve.
            route: The resource REST API route to use.
            response_model: Model to use to serialize the response body.

        Returns:
            The retrieved resource.
        """
        return response_model.parse_obj(self.get(f"{route}/{resource_id}"))

    def _list_resources(
        self,
        route: str,
        response_model: Type[AnyResponseModel],
        **filters: Any,
    ) -> List[AnyResponseModel]:
        """Retrieve a list of resources filtered by some criteria.

        Args:
            route: The resource REST API route to use.
            response_model: Model to use to serialize the response body.
            filters: Filter parameters to use in the query.

        Returns:
            List of retrieved resources matching the filter criteria.

        Raises:
            ValueError: If the value returned by the server is not a list.
        """
        # Drop filter params the caller did not supply.
        query = {
            key: value for key, value in filters.items() if value is not None
        }
        body = self.get(f"{route}", params=query)
        if isinstance(body, list):
            return [response_model.parse_obj(entry) for entry in body]
        raise ValueError(f"Bad API Response. Expected list, got {type(body)}")

    def _update_resource(
        self,
        resource_id: UUID,
        resource_update: BaseRequestModel,
        response_model: Type[AnyResponseModel],
        route: str,
    ) -> AnyResponseModel:
        """Update an existing resource via a PUT request.

        Args:
            resource_id: The id of the resource to update.
            resource_update: The resource update.
            response_model: Model used to deserialize the response body.
            route: The resource REST API route to use.

        Returns:
            The updated resource.
        """
        body = self.put(f"{route}/{resource_id}", body=resource_update)
        return response_model.parse_obj(body)

    def _delete_resource(
        self, resource_id: Union[str, UUID], route: str
    ) -> None:
        """Delete the resource identified by the given ID.

        Args:
            resource_id: The ID of the resource to delete.
            route: The resource REST API route to use.
        """
        self.delete(f"{route}/{resource_id}")

    def _sync_runs(self) -> None:
        """Syncs runs from MLMD.

        Triggers the synchronization by issuing a GET request against the
        metadata sync endpoint.
        """
        self.get(METADATA_SYNC)
active_user_name: str property readonly

Gets the active username.

Returns:

Type Description
str

The active username.

session: Session property readonly

Authenticate to the ZenML server.

Returns:

Type Description
Session

A requests session with the authentication token.

CONFIG_TYPE (StoreConfiguration) pydantic-model

REST ZenML store configuration.

Attributes:

Name Type Description
username str

The username to use to connect to the Zen server.

password str

The password to use to connect to the Zen server.

verify_ssl Union[bool, str]

Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use or the CA bundle value itself.

http_timeout int

The timeout to use for all requests.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStoreConfiguration(StoreConfiguration):
    """REST ZenML store configuration.

    Attributes:
        username: The username to use to connect to the Zen server.
        password: The password to use to connect to the Zen server.
        verify_ssl: Either a boolean, in which case it controls whether we
            verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use or the CA bundle value itself.
        http_timeout: The timeout to use for all requests.
    """

    type: StoreType = StoreType.REST
    username: str
    password: str = ""
    verify_ssl: Union[bool, str] = True
    http_timeout: int = DEFAULT_HTTP_TIMEOUT

    @validator("url")
    def validate_url(cls, url: str) -> str:
        """Validates that the URL is a well-formed REST store URL.

        Args:
            url: The URL to be validated.

        Returns:
            The validated URL without trailing slashes.

        Raises:
            ValueError: If the URL is not a well-formed REST store URL.
        """
        url = url.rstrip("/")
        scheme = re.search("^([a-z0-9]+://)", url)
        if scheme is None or scheme.group() not in ("https://", "http://"):
            # Fix: the message was missing the `f` prefix, so the offending
            # URL was never interpolated into the error text.
            raise ValueError(
                f"Invalid URL for REST store: {url}. Should be in the form "
                "https://hostname[:port] or http://hostname[:port]."
            )

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        return url

    @validator("verify_ssl")
    def validate_verify_ssl(
        cls, verify_ssl: Union[bool, str]
    ) -> Union[bool, str]:
        """Validates that the verify_ssl either points to a file or is a bool.

        If `verify_ssl` is a certificate file path (or raw certificate
        contents), the contents are copied into a managed file under the
        local stores path and the value is replaced by that managed path.

        Args:
            verify_ssl: The verify_ssl value to be validated.

        Returns:
            The validated verify_ssl value.
        """
        secret_folder = Path(
            GlobalConfiguration().local_stores_path,
            "certificates",
        )
        # Booleans and paths already under ZenML's management need no
        # further processing.
        if isinstance(verify_ssl, bool) or verify_ssl.startswith(
            str(secret_folder)
        ):
            return verify_ssl

        # A path to an existing file is replaced by the file's contents;
        # otherwise the string is assumed to be the CA bundle itself.
        if os.path.isfile(verify_ssl):
            with open(verify_ssl, "r") as f:
                verify_ssl = f.read()

        # Persist the CA bundle into a private (0600) managed file.
        fileio.makedirs(str(secret_folder))
        file_path = Path(secret_folder, "ca_bundle.pem")
        with open(file_path, "w") as f:
            f.write(verify_ssl)
        file_path.chmod(0o600)
        verify_ssl = str(file_path)

        return verify_ssl

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return urlparse(url).scheme in ("http", "https")

    def expand_certificates(self) -> None:
        """Expands the certificates in the verify_ssl field."""
        # Load the certificate values back into the configuration
        if isinstance(self.verify_ssl, str) and os.path.isfile(self.verify_ssl):
            with open(self.verify_ssl, "r") as f:
                self.verify_ssl = f.read()

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Create a copy of the store config using a different path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, RestZenStoreConfiguration)
        config = config.copy(deep=True)

        # Load the certificate values back into the configuration
        config.expand_certificates()
        return config

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the `verify_ssl` attribute can be expanded to the contents
        # of the certificate file.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/rest_zen_store.py
class Config:
    """Pydantic configuration class.

    Controls pydantic behavior for the REST store configuration model.
    """

    # Don't validate attributes when assigning them. This is necessary
    # because the `verify_ssl` attribute can be expanded to the contents
    # of the certificate file.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Create a copy of the store config using a different path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Create a copy of the store config using a different path.

    Produces a configuration copy that can be loaded from another
    configuration path or environment, such as a container image. Any
    certificate file referenced by the configuration is inlined (via
    `expand_certificates`) so the copy is self-contained.

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the copied
            configuration, when it differs from `config_path` (e.g. when the
            configuration is copied into a container image and loaded using
            a different absolute path there).

    Returns:
        A new store configuration object that reflects the new configuration
        path.
    """
    assert isinstance(config, RestZenStoreConfiguration)
    copied_config = config.copy(deep=True)

    # Replace any certificate file path with its contents so the copy does
    # not depend on files from the original environment.
    copied_config.expand_certificates()
    return copied_config
expand_certificates(self)

Expands the certificates in the verify_ssl field.

Source code in zenml/zen_stores/rest_zen_store.py
def expand_certificates(self) -> None:
    """Expands the certificates in the verify_ssl field.

    If `verify_ssl` currently holds a path to an existing file, replace it
    with the file's contents so the configuration is self-contained.
    """
    value = self.verify_ssl
    if isinstance(value, str) and os.path.isfile(value):
        with open(value, "r") as f:
            self.verify_ssl = f.read()
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    scheme = urlparse(url).scheme
    return scheme in {"http", "https"}
validate_url(url) classmethod

Validates that the URL is a well-formed REST store URL.

Parameters:

Name Type Description Default
url str

The URL to be validated.

required

Returns:

Type Description
str

The validated URL without trailing slashes.

Exceptions:

Type Description
ValueError

If the URL is not a well-formed REST store URL.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("url")
def validate_url(cls, url: str) -> str:
    """Validates that the URL is a well-formed REST store URL.

    Args:
        url: The URL to be validated.

    Returns:
        The validated URL without trailing slashes.

    Raises:
        ValueError: If the URL is not a well-formed REST store URL.
    """
    url = url.rstrip("/")
    scheme = re.search("^([a-z0-9]+://)", url)
    if scheme is None or scheme.group() not in ("https://", "http://"):
        # Fix: the message was missing the `f` prefix, so the offending URL
        # was never interpolated into the error text.
        raise ValueError(
            f"Invalid URL for REST store: {url}. Should be in the form "
            "https://hostname[:port] or http://hostname[:port]."
        )

    # When running inside a container, if the URL uses localhost, the
    # target service will not be available. We try to replace localhost
    # with one of the special Docker or K3D internal hostnames.
    url = replace_localhost_with_internal_hostname(url)

    return url
validate_verify_ssl(verify_ssl) classmethod

Validates that the verify_ssl either points to a file or is a bool.

Parameters:

Name Type Description Default
verify_ssl Union[bool, str]

The verify_ssl value to be validated.

required

Returns:

Type Description
Union[bool, str]

The validated verify_ssl value.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("verify_ssl")
def validate_verify_ssl(
    cls, verify_ssl: Union[bool, str]
) -> Union[bool, str]:
    """Validates that the verify_ssl either points to a file or is a bool.

    A certificate file path (or raw certificate contents) is persisted into
    a managed file under the local stores path, and the value is replaced
    by that managed path.

    Args:
        verify_ssl: The verify_ssl value to be validated.

    Returns:
        The validated verify_ssl value.
    """
    secret_folder = Path(
        GlobalConfiguration().local_stores_path,
        "certificates",
    )
    # Booleans and paths already under ZenML's management need no further
    # processing.
    if isinstance(verify_ssl, bool) or verify_ssl.startswith(
        str(secret_folder)
    ):
        return verify_ssl

    # A path to an existing file is replaced by the file's contents;
    # otherwise the string is assumed to be the CA bundle itself.
    if os.path.isfile(verify_ssl):
        with open(verify_ssl, "r") as f:
            verify_ssl = f.read()

    # Persist the CA bundle into a private (0600) managed file.
    fileio.makedirs(str(secret_folder))
    file_path = Path(secret_folder, "ca_bundle.pem")
    with open(file_path, "w") as f:
        f.write(verify_ssl)
    file_path.chmod(0o600)
    verify_ssl = str(file_path)

    return verify_ssl
create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Source code in zenml/zen_stores/rest_zen_store.py
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.
    """
    return self._create_resource(
        route=ARTIFACTS,
        resource=artifact,
        response_model=ArtifactResponseModel,
    )
create_flavor(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_role_assignment(self, role_assignment)

Creates a new role assignment.

Parameters:

Name Type Description Default
role_assignment RoleAssignmentRequestModel

The role assignment to create.

required

Returns:

Type Description
RoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def create_role_assignment(
    self, role_assignment: RoleAssignmentRequestModel
) -> RoleAssignmentResponseModel:
    """Creates a new role assignment.

    Args:
        role_assignment: The role assignment to create.

    Returns:
        The newly created role assignment.
    """
    return self._create_resource(
        resource=role_assignment,
        response_model=RoleAssignmentResponseModel,
        route=ROLE_ASSIGNMENTS,
    )
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.
    """
    return self._create_project_scoped_resource(
        route=RUNS,
        resource=pipeline_run,
        response_model=PipelineRunResponseModel,
    )
create_run_step(self, step)

Creates a step.

Parameters:

Name Type Description Default
step StepRunRequestModel

The step to create.

required

Returns:

Type Description
StepRunResponseModel

The created step.

Source code in zenml/zen_stores/rest_zen_store.py
def create_run_step(
    self, step: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step.

    Args:
        step: The step to create.

    Returns:
        The created step.
    """
    return self._create_resource(
        route=STEPS,
        resource=step,
        response_model=StepRunResponseModel,
    )
create_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Invokes the wrapped function and then tracks an analytics event for the
    call on a best-effort basis; any tracking failure is logged at debug
    level and never propagated. (`func`, `event_name` and `metadata` are
    free variables captured from the enclosing decorator scope.)

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker, use
        # that instance to emit the event.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among the
        # result and the call arguments...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ...otherwise emit a plain event with the static metadata
            # captured by the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never break the wrapped operation.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete(self, path, params=None, **kwargs)

Make a DELETE request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def delete(
    self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> Json:
    """Issue a DELETE request against the given endpoint path.

    Args:
        path: The path to the endpoint.
        params: Optional query parameters for the request.
        kwargs: Extra keyword arguments forwarded to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending DELETE request to {path}...")
    endpoint_url = self.url + API + VERSION_1 + path
    return self._request("DELETE", endpoint_url, params=params, **kwargs)
delete_flavor(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_role_assignment(self, role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
role_assignment_id UUID

The ID of the specific role assignment

required
Source code in zenml/zen_stores/rest_zen_store.py
def delete_role_assignment(self, role_assignment_id: UUID) -> None:
    """Remove a single role assignment from the store.

    Args:
        role_assignment_id: The ID of the specific role assignment
    """
    self._delete_resource(
        route=ROLE_ASSIGNMENTS,
        resource_id=role_assignment_id,
    )
delete_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function, then emits an analytics event for it.
    Any analytics failure is swallowed so tracking can never break the
    wrapped call.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        # Use the bound object as the tracker when the wrapped callable
        # is a method of an analytics-capable store. (Idiomatic
        # truthiness check instead of `len(args)`.)
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking through the first trackable model found among
        # the result and the call arguments ...
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # ... otherwise fall back to a plain event.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        # Analytics must never raise into user code; log at debug only.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
get(self, path, params=None, **kwargs)

Make a GET request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def get(
    self, path: str, params: Optional[Dict[str, Any]] = None, **kwargs: Any
) -> Json:
    """Issue a GET request against the given endpoint path.

    Args:
        path: The path to the endpoint.
        params: Optional query parameters for the request.
        kwargs: Extra keyword arguments forwarded to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending GET request to {path}...")
    endpoint_url = self.url + API + VERSION_1 + path
    return self._request("GET", endpoint_url, params=params, **kwargs)
get_auth_user(self, user_name_or_id)

Gets the auth model to a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Exceptions:

Type Description
NotImplementedError

This method is only available for the SQLZenStore.

Source code in zenml/zen_stores/rest_zen_store.py
def get_auth_user(
    self, user_name_or_id: Union[str, UUID]
) -> "UserAuthModel":
    """Gets the auth model to a specific user.

    Unconditionally unsupported on the REST client.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Raises:
        NotImplementedError: This method is only available for the
            SQLZenStore.
    """
    message = (
        "This method is only designed for use by the server endpoints. "
        "It is not designed to be called from the client side."
    )
    raise NotImplementedError(message)
get_flavor(self, flavor_id)

Get a stack component flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the stack component flavor to get.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Source code in zenml/zen_stores/rest_zen_store.py
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Fetch a single stack component flavor by its ID.

    Args:
        flavor_id: The ID of the stack component flavor to get.

    Returns:
        The stack component flavor.
    """
    return self._get_resource(
        response_model=FlavorResponseModel,
        route=FLAVORS,
        resource_id=flavor_id,
    )
get_metadata_config(self, expand_certs=False)

Get the TFX metadata config of this ZenStore.

Parameters:

Name Type Description Default
expand_certs bool

Whether to expand the certificate paths in the connection config to their value.

False

Exceptions:

Type Description
ValueError

if the server response is invalid.

Returns:

Type Description
Union[ConnectionConfig, MetadataStoreClientConfig]

The TFX metadata config of this ZenStore.

Source code in zenml/zen_stores/rest_zen_store.py
def get_metadata_config(
    self, expand_certs: bool = False
) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
    """Get the TFX metadata config of this ZenStore.

    Args:
        expand_certs: Whether to expand the certificate paths in the
            connection config to their value.

    Raises:
        ValueError: if the server response is invalid.

    Returns:
        The TFX metadata config of this ZenStore.
    """
    from google.protobuf.json_format import Parse, ParseError
    from ml_metadata.proto.metadata_store_pb2 import (
        ConnectionConfig,
        MetadataStoreClientConfig,
    )

    from zenml.zen_stores.sql_zen_store import SqlZenStoreConfiguration

    body = self.get(f"{METADATA_CONFIG}")
    if not isinstance(body, str):
        raise ValueError(
            f"Invalid response from server: {body}. Expected string."
        )

    # First try to parse the response as a ConnectionConfig, then as a
    # MetadataStoreClientConfig.
    try:
        metadata_config_pb = Parse(body, ConnectionConfig())
    except ParseError:
        return Parse(body, MetadataStoreClientConfig())

    # if the server returns a SQLite connection config, but the file is not
    # available locally, we need to replace the path with the local path of
    # the default local SQLite database
    if metadata_config_pb.HasField("sqlite") and not os.path.isfile(
        metadata_config_pb.sqlite.filename_uri
    ):
        message = (
            f"The ZenML server is using a SQLite database at "
            f"{metadata_config_pb.sqlite.filename_uri} that is not "
            f"available locally. Using the default local SQLite "
            f"database instead."
        )
        if not self.is_local_store():
            logger.warning(message)
        else:
            logger.debug(message)
        default_store_cfg = GlobalConfiguration().get_default_store()
        assert isinstance(default_store_cfg, SqlZenStoreConfiguration)
        return default_store_cfg.get_metadata_config()

    if metadata_config_pb.HasField("mysql"):
        # If the server returns a MySQL connection config with a hostname
        # that is a Docker or K3D internal hostname that cannot be resolved
        # locally, we need to replace it with localhost. We're assuming
        # that we're running on the host machine and the MySQL server can
        # be accessed via localhost.
        metadata_config_pb.mysql.host = (
            replace_internal_hostname_with_localhost(
                metadata_config_pb.mysql.host
            )
        )

        if not expand_certs and metadata_config_pb.mysql.HasField(
            "ssl_options"
        ):
            # Save the certificates in a secure location on disk
            secret_folder = Path(
                GlobalConfiguration().local_stores_path,
                "certificates",
            )
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                # BUGFIX: the previous code used `key.lstrip("ssl_")`,
                # which strips any of the characters "s", "l" and "_"
                # (a character set), not the "ssl_" prefix. Use an
                # explicit prefix slice to derive the protobuf field
                # name ("key", "ca", "cert") instead.
                field_name = key[len("ssl_") :]
                if not metadata_config_pb.mysql.ssl_options.HasField(
                    field_name
                ):
                    continue
                content = getattr(
                    metadata_config_pb.mysql.ssl_options,
                    field_name,
                )
                # Materialize inline certificate contents as files so
                # the metadata store client can read them from disk.
                if content and not os.path.isfile(content):
                    fileio.makedirs(str(secret_folder))
                    file_path = Path(secret_folder, f"{key}.pem")
                    with open(file_path, "w") as f:
                        f.write(content)
                    # Restrict the key material to the current user.
                    file_path.chmod(0o600)
                    setattr(
                        metadata_config_pb.mysql.ssl_options,
                        field_name,
                        str(file_path),
                    )

    return metadata_config_pb
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run.
    """
    # "get_if_exists" asks the server to return a matching existing run
    # instead of rejecting the creation.
    query_params = {"get_if_exists": True}
    return self._create_project_scoped_resource(
        response_model=PipelineRunResponseModel,
        route=RUNS,
        resource=pipeline_run,
        params=query_params,
    )
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Source code in zenml/zen_stores/rest_zen_store.py
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Fetch a single pipeline by its ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.
    """
    return self._get_resource(
        response_model=PipelineResponseModel,
        route=PIPELINES,
        resource_id=pipeline_id,
    )
get_project(self, project_name_or_id)

Get an existing project by name or ID.

Parameters:

Name Type Description Default
project_name_or_id Union[uuid.UUID, str]

Name or ID of the project to get.

required

Returns:

Type Description
ProjectResponseModel

The requested project.

Source code in zenml/zen_stores/rest_zen_store.py
def get_project(
    self, project_name_or_id: Union[UUID, str]
) -> ProjectResponseModel:
    """Fetch an existing project by name or ID.

    Args:
        project_name_or_id: Name or ID of the project to get.

    Returns:
        The requested project.
    """
    return self._get_resource(
        response_model=ProjectResponseModel,
        route=PROJECTS,
        resource_id=project_name_or_id,
    )
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Source code in zenml/zen_stores/rest_zen_store.py
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Fetch a single role by name or ID.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.
    """
    return self._get_resource(
        response_model=RoleResponseModel,
        route=ROLES,
        resource_id=role_name_or_id,
    )
get_role_assignment(self, role_assignment_id)

Get an existing role assignment by its ID.

Parameters:

Name Type Description Default
role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
RoleAssignmentResponseModel

The requested role assignment.

Source code in zenml/zen_stores/rest_zen_store.py
def get_role_assignment(
    self, role_assignment_id: UUID
) -> RoleAssignmentResponseModel:
    """Get an existing role assignment by its ID.

    Args:
        role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.
    """
    return self._get_resource(
        resource_id=role_assignment_id,
        route=ROLE_ASSIGNMENTS,
        response_model=RoleAssignmentResponseModel,
    )
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[uuid.UUID, str]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def get_run(
    self, run_name_or_id: Union[UUID, str]
) -> PipelineRunResponseModel:
    """Fetch a pipeline run by name or ID.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.
    """
    # Synchronize runs before the lookup.
    self._sync_runs()
    return self._get_resource(
        response_model=PipelineRunResponseModel,
        route=RUNS,
        resource_id=run_name_or_id,
    )
get_run_step(self, step_id)

Get a step by ID.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to get.

required

Returns:

Type Description
StepRunResponseModel

The step.

Source code in zenml/zen_stores/rest_zen_store.py
def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
    """Fetch a single step run by its ID.

    Args:
        step_id: The ID of the step to get.

    Returns:
        The step.
    """
    # Synchronize runs before the lookup.
    self._sync_runs()
    return self._get_resource(
        response_model=StepRunResponseModel,
        route=STEPS,
        resource_id=step_id,
    )
get_run_step_inputs(self, step_id)

Get a list of inputs for a specific step.

Parameters:

Name Type Description Default
step_id UUID

The id of the step to get inputs for.

required

Returns:

Type Description
Dict[str, zenml.models.artifact_models.ArtifactResponseModel]

A dict mapping artifact names to the input artifacts for the step.

Exceptions:

Type Description
ValueError

if the response from the API is not a dict.

Source code in zenml/zen_stores/rest_zen_store.py
def get_run_step_inputs(
    self, step_id: UUID
) -> Dict[str, ArtifactResponseModel]:
    """Get a list of inputs for a specific step.

    Args:
        step_id: The id of the step to get inputs for.

    Returns:
        A dict mapping artifact names to the input artifacts for the step.

    Raises:
        ValueError: if the response from the API is not a dict.
    """
    body = self.get(f"{STEPS}/{str(step_id)}{INPUTS}")
    if not isinstance(body, dict):
        raise ValueError(
            f"Bad API Response. Expected dict, got {type(body)}"
        )
    inputs: Dict[str, ArtifactResponseModel] = {}
    for artifact_name, entry in body.items():
        inputs[artifact_name] = ArtifactResponseModel.parse_obj(entry)
    return inputs
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Source code in zenml/zen_stores/rest_zen_store.py
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Fetch a stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.
    """
    return self._get_resource(
        response_model=StackResponseModel,
        route=STACKS,
        resource_id=stack_id,
    )
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Source code in zenml/zen_stores/rest_zen_store.py
def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
    """Fetch a stack component by its ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.
    """
    return self._get_resource(
        response_model=ComponentResponseModel,
        route=STACK_COMPONENTS,
        resource_id=component_id,
    )
get_store_info(self)

Get information about the server.

Returns:

Type Description
ServerModel

Information about the server.

Source code in zenml/zen_stores/rest_zen_store.py
def get_store_info(self) -> ServerModel:
    """Get information about the server.

    Returns:
        Information about the server.
    """
    response_body = self.get(INFO)
    return ServerModel.parse_obj(response_body)
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Source code in zenml/zen_stores/rest_zen_store.py
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Fetch a single team by name or ID.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.
    """
    return self._get_resource(
        response_model=TeamResponseModel,
        route=TEAMS,
        resource_id=team_name_or_id,
    )
get_user(self, user_name_or_id)

Gets a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Source code in zenml/zen_stores/rest_zen_store.py
def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
    """Fetch a single user by name or ID.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
    return self._get_resource(
        response_model=UserResponseModel,
        route=USERS,
        resource_id=user_name_or_id,
    )
list_artifacts(self, artifact_uri=None, parent_step_id=None)

Lists all artifacts.

Parameters:

Name Type Description Default
artifact_uri Optional[str]

If specified, only artifacts with the given URI will be returned.

None
parent_step_id Optional[uuid.UUID]

If specified, only artifacts for the given step run will be returned.

None

Returns:

Type Description
List[zenml.models.artifact_models.ArtifactResponseModel]

A list of all artifacts.

Source code in zenml/zen_stores/rest_zen_store.py
def list_artifacts(
    self,
    artifact_uri: Optional[str] = None,
    parent_step_id: Optional[UUID] = None,
) -> List[ArtifactResponseModel]:
    """Lists all artifacts.

    Args:
        artifact_uri: If specified, only artifacts with the given URI will
            be returned.
        parent_step_id: If specified, only artifacts for the given step run
            will be returned.

    Returns:
        A list of all artifacts.
    """
    self._sync_runs()
    # Build the filter mapping explicitly rather than scraping locals().
    filters = {
        "artifact_uri": artifact_uri,
        "parent_step_id": parent_step_id,
    }
    return self._list_resources(
        route=ARTIFACTS,
        resource_model=ArtifactResponseModel,
        response_model=ArtifactResponseModel,
        **filters,
    )
list_flavors(self, project_name_or_id=None, user_name_or_id=None, component_type=None, name=None, is_shared=None)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

Optionally filter by the Project to which the component flavors belong

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter by the owner

None
component_type Optional[zenml.enums.StackComponentType]

Optionally filter by type of stack component

None
name Optional[str]

Optionally filter flavors by name

None
is_shared Optional[bool]

Optionally filter out flavors by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.flavor_models.FlavorResponseModel]

List of all the stack component flavors matching the given criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_flavors(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_type: Optional[StackComponentType] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[FlavorResponseModel]:
    """List all stack component flavors matching the given filter criteria.

    Args:
        project_name_or_id: Optionally filter by the Project to which the
            component flavors belong.
        user_name_or_id: Optionally filter by the owner.
        component_type: Optionally filter by type of stack component.
        name: Optionally filter flavors by name.
        is_shared: Optionally filter out flavors by whether they are
            shared or not.

    Returns:
        List of all the stack component flavors matching the given criteria.
    """
    # Pass every filter through explicitly rather than via `locals()`.
    return self._list_resources(
        route=FLAVORS,
        response_model=FlavorResponseModel,
        project_name_or_id=project_name_or_id,
        user_name_or_id=user_name_or_id,
        component_type=component_type,
        name=name,
        is_shared=is_shared,
    )
list_pipelines(self, project_name_or_id=None, user_name_or_id=None, name=None)

List all pipelines in the project.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines in this project.

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines from this user.

None
name Optional[str]

If provided, only list pipelines with this name.

None

Returns:

Type Description
List[zenml.models.pipeline_models.PipelineResponseModel]

A list of pipelines.

Source code in zenml/zen_stores/rest_zen_store.py
def list_pipelines(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    name: Optional[str] = None,
) -> List[PipelineResponseModel]:
    """List all pipelines in the project.

    Args:
        project_name_or_id: If provided, only list pipelines in this
            project.
        user_name_or_id: If provided, only list pipelines from this user.
        name: If provided, only list pipelines with this name.

    Returns:
        A list of pipelines.
    """
    # Pass every filter through explicitly rather than via `locals()`.
    return self._list_resources(
        route=PIPELINES,
        response_model=PipelineResponseModel,
        project_name_or_id=project_name_or_id,
        user_name_or_id=user_name_or_id,
        name=name,
    )
list_projects(self, name=None)

List all projects.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.project_models.ProjectResponseModel]

A list of all projects.

Source code in zenml/zen_stores/rest_zen_store.py
def list_projects(
    self, name: Optional[str] = None
) -> List[ProjectResponseModel]:
    """List all projects.

    Args:
        name: Optionally filter by name.

    Returns:
        A list of all projects.
    """
    # Single filter — forward it explicitly rather than via `locals()`.
    return self._list_resources(
        route=PROJECTS,
        response_model=ProjectResponseModel,
        name=name,
    )
list_role_assignments(self, project_name_or_id=None, role_name_or_id=None, team_name_or_id=None, user_name_or_id=None)

List all role assignments.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given project

None
role_name_or_id Union[str, uuid.UUID]

If provided, only list assignments of the given role

None
team_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given team

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given user

None

Returns:

Type Description
List[zenml.models.role_assignment_models.RoleAssignmentResponseModel]

A list of all role assignments.

Source code in zenml/zen_stores/rest_zen_store.py
def list_role_assignments(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    role_name_or_id: Optional[Union[str, UUID]] = None,
    team_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
) -> List[RoleAssignmentResponseModel]:
    """List all role assignments.

    Args:
        project_name_or_id: If provided, only list assignments for the given
            project
        role_name_or_id: If provided, only list assignments of the given
            role
        team_name_or_id: If provided, only list assignments for the given
            team
        user_name_or_id: If provided, only list assignments for the given
            user

    Returns:
        A list of all role assignments.
    """
    # Pass the route constant directly; the previous `f"{ROLE_ASSIGNMENTS}"`
    # wrapper was a no-op f-string and inconsistent with sibling methods.
    return self._list_resources(
        route=ROLE_ASSIGNMENTS,
        project_name_or_id=project_name_or_id,
        role_name_or_id=role_name_or_id,
        team_name_or_id=team_name_or_id,
        user_name_or_id=user_name_or_id,
        response_model=RoleAssignmentResponseModel,
    )
list_roles(self, name=None)

List all roles.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.role_models.RoleResponseModel]

A list of all roles.

Source code in zenml/zen_stores/rest_zen_store.py
def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
    """List all roles.

    Args:
        name: Optionally filter by name.

    Returns:
        A list of all roles.
    """
    # Single filter — forward it explicitly rather than via `locals()`.
    return self._list_resources(
        route=ROLES,
        response_model=RoleResponseModel,
        name=name,
    )
list_run_steps(self, run_id=None)

Get all run steps.

Parameters:

Name Type Description Default
run_id Optional[uuid.UUID]

If provided, only return steps for this pipeline run.

None

Returns:

Type Description
List[zenml.models.step_run_models.StepRunResponseModel]

A list of all run steps.

Source code in zenml/zen_stores/rest_zen_store.py
def list_run_steps(
    self, run_id: Optional[UUID] = None
) -> List[StepRunResponseModel]:
    """Get all run steps.

    Args:
        run_id: If provided, only return steps for this pipeline run.

    Returns:
        A list of all run steps.
    """
    # Sync run data first — presumably ensures recently finished steps are
    # visible; confirm against `_sync_runs`.
    self._sync_runs()
    return self._list_resources(
        route=STEPS,
        resource_model=StepRunResponseModel,
        response_model=StepRunResponseModel,
        run_id=run_id,
    )
list_runs(self, name=None, project_name_or_id=None, stack_id=None, component_id=None, user_name_or_id=None, pipeline_id=None, unlisted=False)

Gets all pipeline runs.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this project.

None
stack_id Optional[uuid.UUID]

If provided, only return runs for this stack.

None
component_id Optional[uuid.UUID]

Optionally filter for runs that used the component

None
name Optional[str]

If provided, only return runs with this name.

None
user_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this user.

None
pipeline_id Optional[uuid.UUID]

If provided, only return runs for this pipeline.

None
unlisted bool

If True, only return unlisted runs that are not associated with any pipeline (filter by pipeline_id==None).

False

Returns:

Type Description
List[zenml.models.pipeline_run_models.PipelineRunResponseModel]

A list of all pipeline runs.

Source code in zenml/zen_stores/rest_zen_store.py
def list_runs(
    self,
    name: Optional[str] = None,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    stack_id: Optional[UUID] = None,
    component_id: Optional[UUID] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    pipeline_id: Optional[UUID] = None,
    unlisted: bool = False,
) -> List[PipelineRunResponseModel]:
    """Gets all pipeline runs.

    Args:
        name: If provided, only return runs with this name.
        project_name_or_id: If provided, only return runs for this project.
        stack_id: If provided, only return runs for this stack.
        component_id: Optionally filter for runs that used the component.
        user_name_or_id: If provided, only return runs for this user.
        pipeline_id: If provided, only return runs for this pipeline.
        unlisted: If True, only return unlisted runs that are not
            associated with any pipeline (filter by `pipeline_id==None`).

    Returns:
        A list of all pipeline runs.
    """
    # Sync run data first — presumably pulls in runs tracked locally so the
    # listing is up to date; confirm against `_sync_runs`.
    self._sync_runs()
    # Pass every filter through explicitly rather than via `locals()`.
    return self._list_resources(
        route=RUNS,
        response_model=PipelineRunResponseModel,
        name=name,
        project_name_or_id=project_name_or_id,
        stack_id=stack_id,
        component_id=component_id,
        user_name_or_id=user_name_or_id,
        pipeline_id=pipeline_id,
        unlisted=unlisted,
    )
list_stack_components(self, project_name_or_id=None, user_name_or_id=None, type=None, flavor_name=None, name=None, is_shared=None)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

The ID or name of the Project to which the stack components belong

None
type Optional[str]

Optionally filter by type of stack component

None
flavor_name Optional[str]

Optionally filter by flavor

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stack components by the owner

None
name Optional[str]

Optionally filter stack component by name

None
is_shared Optional[bool]

Optionally filter out stack components by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.component_models.ComponentResponseModel]

A list of all stack components matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_stack_components(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    type: Optional[str] = None,
    flavor_name: Optional[str] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[ComponentResponseModel]:
    """List all stack components matching the given filter criteria.

    Args:
        project_name_or_id: The ID or name of the Project to which the stack
            components belong.
        user_name_or_id: Optionally filter stack components by the owner.
        type: Optionally filter by type of stack component. NOTE: the
            parameter name shadows the `type` builtin but is part of the
            public interface and kept for compatibility.
        flavor_name: Optionally filter by flavor.
        name: Optionally filter stack components by name.
        is_shared: Optionally filter out stack components by whether they
            are shared or not.

    Returns:
        A list of all stack components matching the filter criteria.
    """
    # Pass every filter through explicitly rather than via `locals()`.
    return self._list_resources(
        route=STACK_COMPONENTS,
        response_model=ComponentResponseModel,
        project_name_or_id=project_name_or_id,
        user_name_or_id=user_name_or_id,
        type=type,
        flavor_name=flavor_name,
        name=name,
        is_shared=is_shared,
    )
list_stacks(self, project_name_or_id=None, user_name_or_id=None, component_id=None, name=None, is_shared=None)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

ID or name of the Project containing the stack

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stacks by their owner

None
component_id Optional[uuid.UUID]

Optionally filter for stacks that contain the component

None
name Optional[str]

Optionally filter stacks by their name

None
is_shared Optional[bool]

Optionally filter out stacks by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.stack_models.StackResponseModel]

A list of all stacks matching the filter criteria.

Source code in zenml/zen_stores/rest_zen_store.py
def list_stacks(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_id: Optional[UUID] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[StackResponseModel]:
    """List all stacks matching the given filter criteria.

    Args:
        project_name_or_id: ID or name of the Project containing the stack.
        user_name_or_id: Optionally filter stacks by their owner.
        component_id: Optionally filter for stacks that contain the
            component.
        name: Optionally filter stacks by their name.
        is_shared: Optionally filter out stacks by whether they are shared
            or not.

    Returns:
        A list of all stacks matching the filter criteria.
    """
    # Pass every filter through explicitly rather than via `locals()`.
    return self._list_resources(
        route=STACKS,
        response_model=StackResponseModel,
        project_name_or_id=project_name_or_id,
        user_name_or_id=user_name_or_id,
        component_id=component_id,
        name=name,
        is_shared=is_shared,
    )
list_teams(self, name=None)

List all teams.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.team_models.TeamResponseModel]

A list of all teams.

Source code in zenml/zen_stores/rest_zen_store.py
def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
    """List all teams.

    Args:
        name: Optionally filter by name.

    Returns:
        A list of all teams.
    """
    # Single filter — forward it explicitly rather than via `locals()`.
    return self._list_resources(
        route=TEAMS,
        response_model=TeamResponseModel,
        name=name,
    )
list_users(self, name=None)

List all users.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.user_models.UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/rest_zen_store.py
def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
    """List all users.

    Args:
        name: Optionally filter by name.

    Returns:
        A list of all users.
    """
    # Single filter — forward it explicitly rather than via `locals()`.
    return self._list_resources(
        route=USERS,
        response_model=UserResponseModel,
        name=name,
    )
post(self, path, body, params=None, **kwargs)

Make a POST request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
body BaseModel

The body to send.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def post(
    self,
    path: str,
    body: BaseModel,
    params: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Json:
    """Make a POST request to the given endpoint path.

    Args:
        path: The path to the endpoint.
        body: The body to send.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments to pass to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending POST request to {path}...")
    # Endpoints are always addressed relative to the versioned API root.
    endpoint_url = self.url + API + VERSION_1 + path
    return self._request(
        "POST",
        endpoint_url,
        data=body.json(),
        params=params,
        **kwargs,
    )
put(self, path, body, params=None, **kwargs)

Make a PUT request to the given endpoint path.

Parameters:

Name Type Description Default
path str

The path to the endpoint.

required
body BaseModel

The body to send.

required
params Optional[Dict[str, Any]]

The query parameters to pass to the endpoint.

None
kwargs Any

Additional keyword arguments to pass to the request.

{}

Returns:

Type Description
Union[Dict[str, Any], List[Any], str, int, float, bool]

The response body.

Source code in zenml/zen_stores/rest_zen_store.py
def put(
    self,
    path: str,
    body: BaseModel,
    params: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Json:
    """Make a PUT request to the given endpoint path.

    Args:
        path: The path to the endpoint.
        body: The body to send.
        params: The query parameters to pass to the endpoint.
        kwargs: Additional keyword arguments to pass to the request.

    Returns:
        The response body.
    """
    logger.debug(f"Sending PUT request to {path}...")
    # Endpoints are always addressed relative to the versioned API root.
    endpoint_url = self.url + API + VERSION_1 + path
    # `exclude_unset=True`: only fields explicitly set on the model are
    # serialized, so unset fields are not overwritten server-side.
    return self._request(
        "PUT",
        endpoint_url,
        data=body.json(exclude_unset=True),
        params=params,
        **kwargs,
    )
update_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Source code in zenml/zen_stores/rest_zen_store.py
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.
    """
    # Delegate to the generic resource-update helper on the runs route.
    return self._update_resource(
        route=RUNS,
        resource_id=run_id,
        resource_update=run_update,
        response_model=PipelineRunResponseModel,
    )
update_run_step(self, step_id, step_update)

Updates a step.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to update.

required
step_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step.

Source code in zenml/zen_stores/rest_zen_store.py
def update_run_step(
    self,
    step_id: UUID,
    step_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Updates a step.

    Args:
        step_id: The ID of the step to update.
        step_update: The update to be applied to the step.

    Returns:
        The updated step.
    """
    # Delegate to the generic resource-update helper on the steps route.
    return self._update_resource(
        route=STEPS,
        resource_id=step_id,
        resource_update=step_update,
        response_model=StepRunResponseModel,
    )
update_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/rest_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: everything below is
    # guarded and failures are only logged.
    try:
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first trackable model found among the
        # result, positional args and keyword args (in that order).
        tracked_model = next(
            (
                obj
                for obj in [result, *args, *kwargs.values()]
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result

RestZenStoreConfiguration (StoreConfiguration) pydantic-model

REST ZenML store configuration.

Attributes:

Name Type Description
username str

The username to use to connect to the Zen server.

password str

The password to use to connect to the Zen server.

verify_ssl Union[bool, str]

Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use or the CA bundle value itself.

http_timeout int

The timeout to use for all requests.

Source code in zenml/zen_stores/rest_zen_store.py
class RestZenStoreConfiguration(StoreConfiguration):
    """REST ZenML store configuration.

    Attributes:
        username: The username to use to connect to the Zen server.
        password: The password to use to connect to the Zen server.
        verify_ssl: Either a boolean, in which case it controls whether we
            verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use or the CA bundle value itself.
        http_timeout: The timeout to use for all requests.
    """

    type: StoreType = StoreType.REST
    username: str
    password: str = ""
    verify_ssl: Union[bool, str] = True
    http_timeout: int = DEFAULT_HTTP_TIMEOUT

    @validator("url")
    def validate_url(cls, url: str) -> str:
        """Validates that the URL is a well-formed REST store URL.

        Args:
            url: The URL to be validated.

        Returns:
            The validated URL without trailing slashes.

        Raises:
            ValueError: If the URL is not a well-formed REST store URL.
        """
        url = url.rstrip("/")
        scheme = re.search(r"^([a-z0-9]+://)", url)
        if scheme is None or scheme.group() not in ("https://", "http://"):
            # BUG FIX: this message was previously a plain string, so the
            # `{url}` placeholder was never interpolated.
            raise ValueError(
                f"Invalid URL for REST store: {url}. Should be in the form "
                "https://hostname[:port] or http://hostname[:port]."
            )

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        return url

    @validator("verify_ssl")
    def validate_verify_ssl(
        cls, verify_ssl: Union[bool, str]
    ) -> Union[bool, str]:
        """Validates that the verify_ssl either points to a file or is a bool.

        Args:
            verify_ssl: The verify_ssl value to be validated.

        Returns:
            The validated verify_ssl value.
        """
        secret_folder = Path(
            GlobalConfiguration().local_stores_path,
            "certificates",
        )
        # Booleans and paths already under the managed certificate folder
        # are passed through unchanged.
        if isinstance(verify_ssl, bool) or verify_ssl.startswith(
            str(secret_folder)
        ):
            return verify_ssl

        # If a path to an external CA bundle was given, read its contents.
        if os.path.isfile(verify_ssl):
            with open(verify_ssl, "r") as f:
                verify_ssl = f.read()

        # Persist the CA bundle contents under the managed certificate
        # folder and store the resulting path instead of the raw value.
        fileio.makedirs(str(secret_folder))
        file_path = Path(secret_folder, "ca_bundle.pem")
        with open(file_path, "w") as f:
            f.write(verify_ssl)
        # Restrict permissions: the bundle file may be security sensitive.
        file_path.chmod(0o600)
        verify_ssl = str(file_path)

        return verify_ssl

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return urlparse(url).scheme in ("http", "https")

    def expand_certificates(self) -> None:
        """Expands the certificates in the verify_ssl field."""
        # Load the certificate values back into the configuration
        if isinstance(self.verify_ssl, str) and os.path.isfile(self.verify_ssl):
            with open(self.verify_ssl, "r") as f:
                self.verify_ssl = f.read()

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Create a copy of the store config using a different path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, RestZenStoreConfiguration)
        config = config.copy(deep=True)

        # Load the certificate values back into the configuration
        config.expand_certificates()
        return config

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the `verify_ssl` attribute can be expanded to the contents
        # of the certificate file.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/rest_zen_store.py
class Config:
    """Pydantic configuration class.

    Controls how Pydantic validates and restricts attributes of the
    enclosing store configuration model.
    """

    # Don't validate attributes when assigning them. This is necessary
    # because the `verify_ssl` attribute can be expanded to the contents
    # of the certificate file.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Create a copy of the store config using a different path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Create a relocatable copy of a REST store configuration.

    The copy is meant to be loaded from a different configuration path or
    environment (e.g. from inside a container image). Any certificate
    files referenced by the configuration are inlined into the copy, so
    no external files need to be carried along.

    Args:
        config: The store configuration to copy. Must be a
            `RestZenStoreConfiguration`.
        config_path: New path where the configuration copy will be loaded
            from. Not used by the REST store, which keeps no
            path-dependent state besides certificates (inlined instead).
        load_config_path: Absolute path that will be used to load the
            copied configuration in the target environment. Also unused
            here for the same reason.

    Returns:
        A new store configuration object that reflects the new
        configuration path.
    """
    assert isinstance(config, RestZenStoreConfiguration)
    relocated = config.copy(deep=True)

    # Replace certificate file paths with the file contents so the copy
    # does not depend on files from this environment.
    relocated.expand_certificates()
    return relocated
expand_certificates(self)

Expands the certificates in the verify_ssl field.

Source code in zenml/zen_stores/rest_zen_store.py
def expand_certificates(self) -> None:
    """Expands the certificates in the verify_ssl field.

    If `verify_ssl` is a string naming an existing file, it is replaced
    with the file's contents; any other value is left untouched.
    """
    cert_ref = self.verify_ssl
    # Only string values can refer to a certificate file on disk.
    if not isinstance(cert_ref, str):
        return
    if not os.path.isfile(cert_ref):
        return
    with open(cert_ref, "r") as cert_file:
        self.verify_ssl = cert_file.read()
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/rest_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    # Only plain and TLS HTTP endpoints can back a REST store.
    scheme = urlparse(url).scheme
    return scheme == "http" or scheme == "https"
validate_url(url) classmethod

Validates that the URL is a well-formed REST store URL.

Parameters:

Name Type Description Default
url str

The URL to be validated.

required

Returns:

Type Description
str

The validated URL without trailing slashes.

Exceptions:

Type Description
ValueError

If the URL is not a well-formed REST store URL.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("url")
def validate_url(cls, url: str) -> str:
    """Validates that the URL is a well-formed REST store URL.

    Args:
        url: The URL to be validated.

    Returns:
        The validated URL without trailing slashes.

    Raises:
        ValueError: If the URL is not a well-formed REST store URL.
    """
    url = url.rstrip("/")
    scheme = re.search("^([a-z0-9]+://)", url)
    if scheme is None or scheme.group() not in ("https://", "http://"):
        # Bug fix: this message was a plain string, so "{url}" was emitted
        # literally instead of interpolating the offending URL.
        raise ValueError(
            f"Invalid URL for REST store: {url}. Should be in the form "
            "https://hostname[:port] or http://hostname[:port]."
        )

    # When running inside a container, if the URL uses localhost, the
    # target service will not be available. We try to replace localhost
    # with one of the special Docker or K3D internal hostnames.
    url = replace_localhost_with_internal_hostname(url)

    return url
validate_verify_ssl(verify_ssl) classmethod

Validates that the verify_ssl either points to a file or is a bool.

Parameters:

Name Type Description Default
verify_ssl Union[bool, str]

The verify_ssl value to be validated.

required

Returns:

Type Description
Union[bool, str]

The validated verify_ssl value.

Source code in zenml/zen_stores/rest_zen_store.py
@validator("verify_ssl")
def validate_verify_ssl(
    cls, verify_ssl: Union[bool, str]
) -> Union[bool, str]:
    """Validates that the verify_ssl either points to a file or is a bool.

    Args:
        verify_ssl: The verify_ssl value to be validated.

    Returns:
        The validated verify_ssl value.
    """
    secret_folder = Path(
        GlobalConfiguration().local_stores_path,
        "certificates",
    )
    # Booleans pass through untouched, as do paths already inside the
    # managed certificate folder.
    if isinstance(verify_ssl, bool):
        return verify_ssl
    if verify_ssl.startswith(str(secret_folder)):
        return verify_ssl

    # If the value names an existing file, read the certificate contents
    # from it.
    if os.path.isfile(verify_ssl):
        with open(verify_ssl, "r") as cert_file:
            verify_ssl = cert_file.read()

    # Persist the certificate contents to a private, well-known location
    # and return that path instead.
    fileio.makedirs(str(secret_folder))
    bundle_path = Path(secret_folder, "ca_bundle.pem")
    with open(bundle_path, "w") as bundle_file:
        bundle_file.write(verify_ssl)
    bundle_path.chmod(0o600)
    return str(bundle_path)

schemas special

SQL Model Implementations.

artifact_schemas

SQLModel implementation of artifact tables.

ArtifactSchema (NamedSchema) pydantic-model

SQL Model for artifacts of steps.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
class ArtifactSchema(NamedSchema, table=True):
    """SQL Model for artifacts of steps."""

    __tablename__ = "artifacts"

    parent_step_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="parent_step_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    producer_step_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="producer_step_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )

    type: ArtifactType
    uri: str
    materializer: str
    data_type: str
    is_cached: bool

    # Optional cross-references — presumably IDs in the MLMD (ML Metadata)
    # store; verify against the callers that populate them.
    mlmd_id: Optional[int] = Field(default=None, nullable=True)
    mlmd_parent_step_id: Optional[int] = Field(default=None, nullable=True)
    mlmd_producer_step_id: Optional[int] = Field(default=None, nullable=True)

    @classmethod
    def from_request(
        cls, artifact_request: ArtifactRequestModel
    ) -> "ArtifactSchema":
        """Build an `ArtifactSchema` from an `ArtifactRequestModel`.

        Args:
            artifact_request: The request model to convert.

        Returns:
            The converted schema.
        """
        request = artifact_request
        return cls(
            name=request.name,
            parent_step_id=request.parent_step_id,
            producer_step_id=request.producer_step_id,
            type=request.type,
            uri=request.uri,
            materializer=request.materializer,
            data_type=request.data_type,
            is_cached=request.is_cached,
            mlmd_id=request.mlmd_id,
            mlmd_parent_step_id=request.mlmd_parent_step_id,
            mlmd_producer_step_id=request.mlmd_producer_step_id,
        )

    def to_model(self) -> ArtifactResponseModel:
        """Build an `ArtifactResponseModel` from this schema row.

        Returns:
            The created `ArtifactModel`.
        """
        response_fields = dict(
            id=self.id,
            name=self.name,
            parent_step_id=self.parent_step_id,
            producer_step_id=self.producer_step_id,
            type=self.type,
            uri=self.uri,
            materializer=self.materializer,
            data_type=self.data_type,
            is_cached=self.is_cached,
            mlmd_id=self.mlmd_id,
            mlmd_parent_step_id=self.mlmd_parent_step_id,
            mlmd_producer_step_id=self.mlmd_producer_step_id,
            created=self.created,
            updated=self.updated,
        )
        return ArtifactResponseModel(**response_fields)
from_request(artifact_request) classmethod

Convert an ArtifactRequestModel to an ArtifactSchema.

Parameters:

Name Type Description Default
artifact_request ArtifactRequestModel

The request model to convert.

required

Returns:

Type Description
ArtifactSchema

The converted schema.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
@classmethod
def from_request(
    cls, artifact_request: ArtifactRequestModel
) -> "ArtifactSchema":
    """Build an `ArtifactSchema` from an `ArtifactRequestModel`.

    Args:
        artifact_request: The request model to convert.

    Returns:
        The converted schema.
    """
    request = artifact_request
    return cls(
        name=request.name,
        parent_step_id=request.parent_step_id,
        producer_step_id=request.producer_step_id,
        type=request.type,
        uri=request.uri,
        materializer=request.materializer,
        data_type=request.data_type,
        is_cached=request.is_cached,
        mlmd_id=request.mlmd_id,
        mlmd_parent_step_id=request.mlmd_parent_step_id,
        mlmd_producer_step_id=request.mlmd_producer_step_id,
    )
to_model(self)

Convert an ArtifactSchema to an ArtifactModel.

Returns:

Type Description
ArtifactResponseModel

The created ArtifactModel.

Source code in zenml/zen_stores/schemas/artifact_schemas.py
def to_model(self) -> ArtifactResponseModel:
    """Build an `ArtifactResponseModel` from this schema row.

    Returns:
        The created `ArtifactModel`.
    """
    response_fields = dict(
        id=self.id,
        name=self.name,
        parent_step_id=self.parent_step_id,
        producer_step_id=self.producer_step_id,
        type=self.type,
        uri=self.uri,
        materializer=self.materializer,
        data_type=self.data_type,
        is_cached=self.is_cached,
        mlmd_id=self.mlmd_id,
        mlmd_parent_step_id=self.mlmd_parent_step_id,
        mlmd_producer_step_id=self.mlmd_producer_step_id,
        created=self.created,
        updated=self.updated,
    )
    return ArtifactResponseModel(**response_fields)

base_schemas

Base classes for SQLModel schemas.

BaseSchema (SQLModel) pydantic-model

Base SQL Model for ZenML entities.

Source code in zenml/zen_stores/schemas/base_schemas.py
class BaseSchema(SQLModel):
    """Base SQL Model for ZenML entities."""

    # Surrogate primary key, generated client-side as a random UUID.
    id: UUID = Field(default_factory=uuid4, primary_key=True)
    # Creation and last-modification timestamps. `datetime.now` yields
    # naive local time, not UTC.
    created: datetime = Field(default_factory=datetime.now)
    updated: datetime = Field(default_factory=datetime.now)
NamedSchema (BaseSchema) pydantic-model

Base Named SQL Model.

Source code in zenml/zen_stores/schemas/base_schemas.py
class NamedSchema(BaseSchema):
    """Base Named SQL Model.

    Adds a human-readable `name` column to `BaseSchema`.
    """

    name: str
ShareableSchema (NamedSchema) pydantic-model

Base shareable SQL Model.

Source code in zenml/zen_stores/schemas/base_schemas.py
class ShareableSchema(NamedSchema):
    """Base shareable SQL Model.

    Adds an `is_shared` flag to `NamedSchema` for entities that can be
    shared between users.
    """

    is_shared: bool

component_schemas

SQL Model Implementations for Stack Components.

StackComponentSchema (ShareableSchema) pydantic-model

SQL Model for stack components.

Source code in zenml/zen_stores/schemas/component_schemas.py
class StackComponentSchema(ShareableSchema, table=True):
    """SQL Model for stack components."""

    __tablename__ = "stack_component"

    type: StackComponentType
    flavor: str
    # Component configuration, persisted as base64-encoded JSON (see
    # `update` / `to_model`).
    configuration: bytes

    project_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    project: "ProjectSchema" = Relationship(back_populates="components")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: "UserSchema" = Relationship(back_populates="components")
    stacks: List["StackSchema"] = Relationship(
        back_populates="components", link_model=StackCompositionSchema
    )

    def update(
        self, component_update: ComponentUpdateModel
    ) -> "StackComponentSchema":
        """Apply a `ComponentUpdateModel` to this row.

        Args:
            component_update: The `ComponentUpdateModel` to update from.

        Returns:
            The updated `StackComponentSchema`.
        """
        update_values = component_update.dict(
            exclude_unset=True, exclude={"project", "user"}
        )
        for field, value in update_values.items():
            if field != "configuration":
                setattr(self, field, value)
                continue
            # Serialize the configuration the same way it was stored.
            serialized = json.dumps(component_update.configuration)
            self.configuration = base64.b64encode(serialized.encode("utf-8"))

        self.updated = datetime.now()
        return self

    def to_model(self) -> "ComponentResponseModel":
        """Create a `ComponentResponseModel` from this row.

        Returns:
            A `ComponentModel`
        """
        decoded_configuration = json.loads(
            base64.b64decode(self.configuration).decode()
        )
        return ComponentResponseModel(
            id=self.id,
            name=self.name,
            type=self.type,
            flavor=self.flavor,
            user=self.user.to_model(),
            project=self.project.to_model(),
            is_shared=self.is_shared,
            configuration=decoded_configuration,
            created=self.created,
            updated=self.updated,
        )
to_model(self)

Creates a ComponentModel from an instance of a StackSchema.

Returns:

Type Description
ComponentResponseModel

A ComponentModel

Source code in zenml/zen_stores/schemas/component_schemas.py
def to_model(self) -> "ComponentResponseModel":
    """Create a `ComponentResponseModel` from this row.

    Returns:
        A `ComponentModel`
    """
    # The configuration column stores base64-encoded JSON.
    decoded_configuration = json.loads(
        base64.b64decode(self.configuration).decode()
    )
    return ComponentResponseModel(
        id=self.id,
        name=self.name,
        type=self.type,
        flavor=self.flavor,
        user=self.user.to_model(),
        project=self.project.to_model(),
        is_shared=self.is_shared,
        configuration=decoded_configuration,
        created=self.created,
        updated=self.updated,
    )
update(self, component_update)

Updates a StackSchema from a ComponentUpdateModel.

Parameters:

Name Type Description Default
component_update ComponentUpdateModel

The ComponentUpdateModel to update from.

required

Returns:

Type Description
StackComponentSchema

The updated StackComponentSchema.

Source code in zenml/zen_stores/schemas/component_schemas.py
def update(
    self, component_update: ComponentUpdateModel
) -> "StackComponentSchema":
    """Apply a `ComponentUpdateModel` to this row.

    Args:
        component_update: The `ComponentUpdateModel` to update from.

    Returns:
        The updated `StackComponentSchema`.
    """
    update_values = component_update.dict(
        exclude_unset=True, exclude={"project", "user"}
    )
    for field, value in update_values.items():
        if field != "configuration":
            setattr(self, field, value)
            continue
        # The configuration is persisted as base64-encoded JSON.
        serialized = json.dumps(component_update.configuration)
        self.configuration = base64.b64encode(serialized.encode("utf-8"))

    self.updated = datetime.now()
    return self

flavor_schemas

SQL Model Implementations for Flavors.

FlavorSchema (NamedSchema) pydantic-model

SQL Model for flavors.

Attributes:

Name Type Description
type StackComponentType

The type of the flavor.

source str

The source of the flavor.

config_schema str

The config schema of the flavor.

integration Optional[str]

The integration associated with the flavor.

Source code in zenml/zen_stores/schemas/flavor_schemas.py
class FlavorSchema(NamedSchema, table=True):
    """SQL Model for flavors.

    Attributes:
        type: The type of the flavor.
        source: The source of the flavor.
        config_schema: The config schema of the flavor.
        integration: The integration associated with the flavor.
    """

    __tablename__ = "flavor"

    type: StackComponentType
    source: str
    config_schema: str = Field(sa_column=Column(String(4096)), nullable=False)
    integration: Optional[str] = Field(default="")

    project_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    project: "ProjectSchema" = Relationship(back_populates="flavors")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: "UserSchema" = Relationship(back_populates="flavors")

    def to_model(self) -> FlavorResponseModel:
        """Convert this flavor row into a `FlavorResponseModel`.

        Returns:
            The flavor model.
        """
        model_fields = dict(
            id=self.id,
            name=self.name,
            type=self.type,
            source=self.source,
            config_schema=self.config_schema,
            integration=self.integration,
            user=self.user.to_model(),
            project=self.project.to_model(),
            created=self.created,
            updated=self.updated,
        )
        return FlavorResponseModel(**model_fields)
to_model(self)

Converts a flavor schema to a flavor model.

Returns:

Type Description
FlavorResponseModel

The flavor model.

Source code in zenml/zen_stores/schemas/flavor_schemas.py
def to_model(self) -> FlavorResponseModel:
    """Convert this flavor row into a `FlavorResponseModel`.

    Returns:
        The flavor model.
    """
    model_fields = dict(
        id=self.id,
        name=self.name,
        type=self.type,
        source=self.source,
        config_schema=self.config_schema,
        integration=self.integration,
        user=self.user.to_model(),
        project=self.project.to_model(),
        created=self.created,
        updated=self.updated,
    )
    return FlavorResponseModel(**model_fields)

pipeline_run_schemas

SQLModel implementation of pipeline run tables.

PipelineRunSchema (NamedSchema) pydantic-model

SQL Model for pipeline runs.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
class PipelineRunSchema(NamedSchema, table=True):
    """SQL Model for pipeline runs."""

    __tablename__ = "pipeline_run"

    pipeline_configuration: str = Field(sa_column=Column(TEXT, nullable=False))
    num_steps: Optional[int]

    zenml_version: str
    git_sha: Optional[str] = Field(nullable=True)
    mlmd_id: Optional[int] = Field(default=None, nullable=True)
    stack_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=StackSchema.__tablename__,
        source_column="stack_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    stack: "StackSchema" = Relationship(back_populates="runs")

    pipeline_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=PipelineSchema.__tablename__,
        source_column="pipeline_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    pipeline: "PipelineSchema" = Relationship(back_populates="runs")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: "UserSchema" = Relationship(back_populates="runs")

    project_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    project: "ProjectSchema" = Relationship(back_populates="runs")

    orchestrator_run_id: Optional[str] = Field(nullable=True)

    status: ExecutionStatus

    def to_model(
        self, _block_recursion: bool = False
    ) -> PipelineRunResponseModel:
        """Convert a `PipelineRunSchema` to a `PipelineRunResponseModel`.

        Args:
            _block_recursion: If True, the `pipeline` attribute is omitted
                to avoid infinite pipeline <-> run recursion.

        Returns:
            The created `PipelineRunResponseModel`.
        """
        response_fields = dict(
            id=self.id,
            name=self.name,
            stack=self.stack.to_model() if self.stack else None,
            project=self.project.to_model(),
            user=self.user.to_model(),
            orchestrator_run_id=self.orchestrator_run_id,
            status=self.status,
            pipeline_configuration=json.loads(self.pipeline_configuration),
            num_steps=self.num_steps,
            git_sha=self.git_sha,
            zenml_version=self.zenml_version,
            mlmd_id=self.mlmd_id,
            created=self.created,
            updated=self.updated,
        )
        if not _block_recursion:
            # The nested pipeline is itself converted with recursion
            # blocked, so it won't pull in its runs again.
            response_fields["pipeline"] = (
                self.pipeline.to_model(True) if self.pipeline else None
            )
        return PipelineRunResponseModel(**response_fields)

    def update(
        self, run_update: "PipelineRunUpdateModel"
    ) -> "PipelineRunSchema":
        """Apply a `PipelineRunUpdateModel` to this run row.

        Only truthy fields of the update model are applied; the `updated`
        timestamp is always refreshed.

        Args:
            run_update: The `PipelineRunUpdateModel` to update with.

        Returns:
            The updated `PipelineRunSchema`.
        """
        new_mlmd_id = run_update.mlmd_id
        if new_mlmd_id:
            self.mlmd_id = new_mlmd_id

        new_status = run_update.status
        if new_status:
            self.status = new_status

        self.updated = datetime.now()
        return self
to_model(self, _block_recursion=False)

Convert a PipelineRunSchema to a PipelineRunResponseModel.

Returns:

Type Description
PipelineRunResponseModel

The created PipelineRunResponseModel.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
def to_model(
    self, _block_recursion: bool = False
) -> PipelineRunResponseModel:
    """Convert a `PipelineRunSchema` to a `PipelineRunResponseModel`.

    Args:
        _block_recursion: If True, the `pipeline` attribute is omitted to
            avoid infinite pipeline <-> run recursion.

    Returns:
        The created `PipelineRunResponseModel`.
    """
    response_fields = dict(
        id=self.id,
        name=self.name,
        stack=self.stack.to_model() if self.stack else None,
        project=self.project.to_model(),
        user=self.user.to_model(),
        orchestrator_run_id=self.orchestrator_run_id,
        status=self.status,
        pipeline_configuration=json.loads(self.pipeline_configuration),
        num_steps=self.num_steps,
        git_sha=self.git_sha,
        zenml_version=self.zenml_version,
        mlmd_id=self.mlmd_id,
        created=self.created,
        updated=self.updated,
    )
    if not _block_recursion:
        # The nested pipeline is converted with recursion blocked, so it
        # won't pull in its runs again.
        response_fields["pipeline"] = (
            self.pipeline.to_model(True) if self.pipeline else None
        )
    return PipelineRunResponseModel(**response_fields)
update(self, run_update)

Update a PipelineRunSchema with a PipelineRunUpdateModel.

Parameters:

Name Type Description Default
run_update PipelineRunUpdateModel

The PipelineRunUpdateModel to update with.

required

Returns:

Type Description
PipelineRunSchema

The updated PipelineRunSchema.

Source code in zenml/zen_stores/schemas/pipeline_run_schemas.py
def update(
    self, run_update: "PipelineRunUpdateModel"
) -> "PipelineRunSchema":
    """Apply a `PipelineRunUpdateModel` to this run row.

    Only truthy fields of the update model are applied; the `updated`
    timestamp is always refreshed.

    Args:
        run_update: The `PipelineRunUpdateModel` to update with.

    Returns:
        The updated `PipelineRunSchema`.
    """
    new_mlmd_id = run_update.mlmd_id
    if new_mlmd_id:
        self.mlmd_id = new_mlmd_id

    new_status = run_update.status
    if new_status:
        self.status = new_status

    self.updated = datetime.now()
    return self

pipeline_schemas

SQL Model Implementations for Pipelines and Pipeline Runs.

PipelineSchema (NamedSchema) pydantic-model

SQL Model for pipelines.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
class PipelineSchema(NamedSchema, table=True):
    """SQL Model for pipelines."""

    docstring: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    spec: str = Field(sa_column=Column(TEXT, nullable=False))

    __tablename__ = "pipeline"

    project_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    project: "ProjectSchema" = Relationship(back_populates="pipelines")

    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )

    user: "UserSchema" = Relationship(back_populates="pipelines")

    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="pipeline",
        sa_relationship_kwargs={"order_by": "asc(PipelineRunSchema.created)"},
    )

    def to_model(
        self,
        _block_recursion: bool = False,
        last_x_runs: int = 3,
    ) -> "PipelineResponseModel":
        """Convert a `PipelineSchema` to a `PipelineModel`.

        Args:
            _block_recursion: Don't recursively fill attributes
            last_x_runs: How many of the most recent runs to use for the
                execution status

        Returns:
            The created PipelineModel.
        """
        # `self.runs` is ordered by ascending creation time (see the
        # relationship above), so the most recent runs are at the *end* of
        # the list. Bug fix: `self.runs[:last_x_runs]` took the oldest
        # runs. Guard against `last_x_runs == 0` explicitly, since
        # `[-0:]` would return the whole list.
        recent_runs = self.runs[-last_x_runs:] if last_x_runs > 0 else []
        status_last_x_runs = [run.status for run in recent_runs]
        if _block_recursion:
            return PipelineResponseModel(
                id=self.id,
                name=self.name,
                project=self.project.to_model(),
                user=self.user.to_model(),
                docstring=self.docstring,
                spec=PipelineSpec.parse_raw(self.spec),
                created=self.created,
                updated=self.updated,
                status=status_last_x_runs,
            )
        else:
            return PipelineResponseModel(
                id=self.id,
                name=self.name,
                project=self.project.to_model(),
                user=self.user.to_model(),
                # Nested runs are converted with recursion blocked to avoid
                # pipeline <-> run cycles.
                runs=[r.to_model(True) for r in self.runs],
                docstring=self.docstring,
                spec=PipelineSpec.parse_raw(self.spec),
                created=self.created,
                updated=self.updated,
                status=status_last_x_runs,
            )

    def update(
        self, pipeline_update: "PipelineUpdateModel"
    ) -> "PipelineSchema":
        """Update a `PipelineSchema` with a `PipelineUpdateModel`.

        Only truthy fields of the update model are applied; the `updated`
        timestamp is always refreshed.

        Args:
            pipeline_update: The update model.

        Returns:
            The updated `PipelineSchema`.
        """
        if pipeline_update.name:
            self.name = pipeline_update.name

        if pipeline_update.docstring:
            self.docstring = pipeline_update.docstring

        if pipeline_update.spec:
            # Serialize with sorted keys for a stable representation.
            self.spec = pipeline_update.spec.json(sort_keys=True)

        self.updated = datetime.now()
        return self
to_model(self, _block_recursion=False, last_x_runs=3)

Convert a PipelineSchema to a PipelineModel.

Parameters:

Name Type Description Default
_block_recursion bool

Don't recursively fill attributes

False
last_x_runs int

How many runs to use for the execution status

3

Returns:

Type Description
PipelineResponseModel

The created PipelineModel.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
def to_model(
    self,
    _block_recursion: bool = False,
    last_x_runs: int = 3,
) -> "PipelineResponseModel":
    """Convert a `PipelineSchema` to a `PipelineModel`.

    Args:
        _block_recursion: Don't recursively fill attributes
        last_x_runs: How many of the most recent runs to use for the
            execution status

    Returns:
        The created PipelineModel.
    """
    # `self.runs` is ordered by ascending creation time (per the
    # relationship's `order_by`), so the most recent runs are at the *end*
    # of the list. Bug fix: `self.runs[:last_x_runs]` took the oldest
    # runs. Guard against `last_x_runs == 0` explicitly, since `[-0:]`
    # would return the whole list.
    recent_runs = self.runs[-last_x_runs:] if last_x_runs > 0 else []
    status_last_x_runs = [run.status for run in recent_runs]
    if _block_recursion:
        return PipelineResponseModel(
            id=self.id,
            name=self.name,
            project=self.project.to_model(),
            user=self.user.to_model(),
            docstring=self.docstring,
            spec=PipelineSpec.parse_raw(self.spec),
            created=self.created,
            updated=self.updated,
            status=status_last_x_runs,
        )
    else:
        return PipelineResponseModel(
            id=self.id,
            name=self.name,
            project=self.project.to_model(),
            user=self.user.to_model(),
            # Nested runs are converted with recursion blocked to avoid
            # pipeline <-> run cycles.
            runs=[r.to_model(True) for r in self.runs],
            docstring=self.docstring,
            spec=PipelineSpec.parse_raw(self.spec),
            created=self.created,
            updated=self.updated,
            status=status_last_x_runs,
        )
update(self, pipeline_update)

Update a PipelineSchema with a PipelineUpdateModel.

Parameters:

Name Type Description Default
pipeline_update PipelineUpdateModel

The update model.

required

Returns:

Type Description
PipelineSchema

The updated PipelineSchema.

Source code in zenml/zen_stores/schemas/pipeline_schemas.py
def update(
    self, pipeline_update: "PipelineUpdateModel"
) -> "PipelineSchema":
    """Update a `PipelineSchema` with a `PipelineUpdateModel`.

    Args:
        pipeline_update: The update model.

    Returns:
        The updated `PipelineSchema`.
    """
    # Only overwrite fields that carry a truthy value in the update model.
    new_name = pipeline_update.name
    if new_name:
        self.name = new_name

    new_docstring = pipeline_update.docstring
    if new_docstring:
        self.docstring = new_docstring

    new_spec = pipeline_update.spec
    if new_spec:
        # The spec is persisted as canonical (key-sorted) JSON text.
        self.spec = new_spec.json(sort_keys=True)

    self.updated = datetime.now()
    return self

project_schemas

SQL Model Implementations for Projects.

ProjectSchema (NamedSchema) pydantic-model

SQL Model for projects.

Source code in zenml/zen_stores/schemas/project_schemas.py
class ProjectSchema(NamedSchema, table=True):
    """SQL Model for projects."""

    __tablename__ = "workspace"

    description: str

    # Project-scoped resources are deleted together with the project.
    user_role_assignments: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )
    team_role_assignments: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="project",
        sa_relationship_kwargs={"cascade": "all, delete"},
    )
    stacks: List["StackSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )
    components: List["StackComponentSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )
    flavors: List["FlavorSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )
    pipelines: List["PipelineSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )
    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="project", sa_relationship_kwargs={"cascade": "delete"}
    )

    @classmethod
    def from_request(cls, project: ProjectRequestModel) -> "ProjectSchema":
        """Create a `ProjectSchema` from a `ProjectRequestModel`.

        Args:
            project: The `ProjectRequestModel` from which to create the schema.

        Returns:
            The created `ProjectSchema`.
        """
        return cls(name=project.name, description=project.description)

    def update(self, project_update: ProjectUpdateModel) -> "ProjectSchema":
        """Update a `ProjectSchema` from a `ProjectUpdateModel`.

        Args:
            project_update: The `ProjectUpdateModel` from which to update the
                schema.

        Returns:
            The updated `ProjectSchema`.
        """
        # Only fields explicitly set on the update model are applied.
        for field, value in project_update.dict(exclude_unset=True).items():
            setattr(self, field, value)

        self.updated = datetime.now()
        return self

    def to_model(self) -> ProjectResponseModel:
        """Convert a `ProjectSchema` to a `ProjectResponseModel`.

        Returns:
            The converted `ProjectResponseModel`.
        """
        return ProjectResponseModel(
            id=self.id,
            name=self.name,
            description=self.description,
            created=self.created,
            updated=self.updated,
        )
from_request(project) classmethod

Create a ProjectSchema from a ProjectRequestModel.

Parameters:

Name Type Description Default
project ProjectRequestModel

The ProjectRequestModel from which to create the schema.

required

Returns:

Type Description
ProjectSchema

The created ProjectSchema.

Source code in zenml/zen_stores/schemas/project_schemas.py
@classmethod
def from_request(cls, project: ProjectRequestModel) -> "ProjectSchema":
    """Create a `ProjectSchema` from a `ProjectRequestModel`.

    Args:
        project: The `ProjectRequestModel` from which to create the schema.

    Returns:
        The created `ProjectSchema`.
    """
    return cls(name=project.name, description=project.description)
to_model(self)

Convert a ProjectSchema to a ProjectResponseModel.

Returns:

Type Description
ProjectResponseModel

The converted ProjectResponseModel.

Source code in zenml/zen_stores/schemas/project_schemas.py
def to_model(self) -> ProjectResponseModel:
    """Convert a `ProjectSchema` to a `ProjectResponseModel`.

    Returns:
        The converted `ProjectResponseModel`.
    """
    # Copy the persisted columns one-to-one into the response model.
    model_fields = {
        "id": self.id,
        "name": self.name,
        "description": self.description,
        "created": self.created,
        "updated": self.updated,
    }
    return ProjectResponseModel(**model_fields)
update(self, project_update)

Update a ProjectSchema from a ProjectUpdateModel.

Parameters:

Name Type Description Default
project_update ProjectUpdateModel

The ProjectUpdateModel from which to update the schema.

required

Returns:

Type Description
ProjectSchema

The updated ProjectSchema.

Source code in zenml/zen_stores/schemas/project_schemas.py
def update(self, project_update: ProjectUpdateModel) -> "ProjectSchema":
    """Update a `ProjectSchema` from a `ProjectUpdateModel`.

    Args:
        project_update: The `ProjectUpdateModel` from which to update the
            schema.

    Returns:
        The updated `ProjectSchema`.
    """
    # Apply only the fields that were explicitly set on the update model.
    changes = project_update.dict(exclude_unset=True)
    for attribute_name, new_value in changes.items():
        setattr(self, attribute_name, new_value)

    self.updated = datetime.now()
    return self

role_schemas

SQLModel implementation of roles that can be assigned to users or teams.

RolePermissionSchema (SQLModel) pydantic-model

SQL Model for role permissions.

Source code in zenml/zen_stores/schemas/role_schemas.py
class RolePermissionSchema(SQLModel, table=True):
    """SQL Model for the permissions granted to a role."""

    __tablename__ = "role_permission"

    # Composite primary key: one row per (permission, role) pair.
    name: PermissionType = Field(primary_key=True)
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    roles: List["RoleSchema"] = Relationship(back_populates="permissions")
RoleSchema (NamedSchema) pydantic-model

SQL Model for roles.

Source code in zenml/zen_stores/schemas/role_schemas.py
class RoleSchema(NamedSchema, table=True):
    """SQL Model for roles."""

    __tablename__ = "role"

    # Deleting a role cascades to its permissions and all assignments.
    permissions: List["RolePermissionSchema"] = Relationship(
        back_populates="roles", sa_relationship_kwargs={"cascade": "delete"}
    )
    user_role_assignments: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="role", sa_relationship_kwargs={"cascade": "delete"}
    )
    team_role_assignments: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="role", sa_relationship_kwargs={"cascade": "delete"}
    )

    @classmethod
    def from_request(cls, model: RoleRequestModel) -> "RoleSchema":
        """Create a `RoleSchema` from a `RoleRequestModel`.

        Args:
            model: The `RoleRequestModel` from which to create the schema.

        Returns:
            The created `RoleSchema`.
        """
        # Permissions are not set here; presumably attached separately by
        # the store — confirm against the caller.
        return cls(name=model.name)

    def update(self, role_update: RoleUpdateModel) -> "RoleSchema":
        """Update a `RoleSchema` from a `RoleUpdateModel`.

        Args:
            role_update: The `RoleUpdateModel` from which to update the schema.

        Returns:
            The updated `RoleSchema`.
        """
        # Permissions are managed separately and therefore excluded here.
        for field, value in role_update.dict(
            exclude_unset=True, exclude={"permissions"}
        ).items():
            setattr(self, field, value)

        self.updated = datetime.now()
        return self

    def to_model(self) -> RoleResponseModel:
        """Convert a `RoleSchema` to a `RoleResponseModel`.

        Returns:
            The converted `RoleResponseModel`.
        """
        return RoleResponseModel(
            id=self.id,
            name=self.name,
            created=self.created,
            updated=self.updated,
            permissions=[PermissionType(p.name) for p in self.permissions],
        )
from_request(model) classmethod

Create a RoleSchema from a RoleRequestModel.

Parameters:

Name Type Description Default
model RoleRequestModel

The RoleRequestModel from which to create the schema.

required

Returns:

Type Description
RoleSchema

The created RoleSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(cls, model: RoleRequestModel) -> "RoleSchema":
    """Create a `RoleSchema` from a `RoleRequestModel`.

    Args:
        model: The `RoleRequestModel` from which to create the schema.

    Returns:
        The created `RoleSchema`.
    """
    return cls(name=model.name)
to_model(self)

Convert a RoleSchema to a RoleResponseModel.

Returns:

Type Description
RoleResponseModel

The converted RoleResponseModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> RoleResponseModel:
    """Convert a `RoleSchema` to a `RoleResponseModel`.

    Returns:
        The converted `RoleResponseModel`.
    """
    # Translate the permission rows back into `PermissionType` values.
    permission_types = [PermissionType(p.name) for p in self.permissions]
    return RoleResponseModel(
        id=self.id,
        name=self.name,
        created=self.created,
        updated=self.updated,
        permissions=permission_types,
    )
update(self, role_update)

Update a RoleSchema from a RoleUpdateModel.

Parameters:

Name Type Description Default
role_update RoleUpdateModel

The RoleUpdateModel from which to update the schema.

required

Returns:

Type Description
RoleSchema

The updated RoleSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
def update(self, role_update: RoleUpdateModel) -> "RoleSchema":
    """Update a `RoleSchema` from a `RoleUpdateModel`.

    Args:
        role_update: The `RoleUpdateModel` from which to update the schema.

    Returns:
        The updated `RoleSchema`.
    """
    # Permissions are managed separately and therefore excluded here.
    changes = role_update.dict(exclude_unset=True, exclude={"permissions"})
    for attribute_name, new_value in changes.items():
        setattr(self, attribute_name, new_value)

    self.updated = datetime.now()
    return self
TeamRoleAssignmentSchema (BaseSchema) pydantic-model

SQL Model for assigning roles to teams for a given project.

Source code in zenml/zen_stores/schemas/role_schemas.py
class TeamRoleAssignmentSchema(BaseSchema, table=True):
    """SQL Model for assigning roles to teams for a given project."""

    __tablename__ = "team_role_assignment"

    # Surrogate primary key for the assignment row.
    id: UUID = Field(primary_key=True, default_factory=uuid4)
    # Deleting the referenced role removes the assignment as well.
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Deleting the referenced team removes the assignment as well.
    team_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=TeamSchema.__tablename__,
        source_column="team_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Optional: presumably a missing project means a project-agnostic
    # assignment — TODO confirm against the store logic.
    project_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )
    role: RoleSchema = Relationship(back_populates="team_role_assignments")
    team: "TeamSchema" = Relationship(back_populates="assigned_roles")
    project: Optional["ProjectSchema"] = Relationship(
        back_populates="team_role_assignments"
    )

    @classmethod
    def from_request(
        cls, role_assignment: RoleAssignmentRequestModel
    ) -> "TeamRoleAssignmentSchema":
        """Create a `TeamRoleAssignmentSchema` from a `RoleAssignmentRequestModel`.

        Args:
            role_assignment: The `RoleAssignmentRequestModel` from which to
                create the schema.

        Returns:
            The created `TeamRoleAssignmentSchema`.
        """
        # The request carries the referenced IDs directly.
        return cls(
            role_id=role_assignment.role,
            team_id=role_assignment.team,
            project_id=role_assignment.project,
        )

    def to_model(self) -> RoleAssignmentResponseModel:
        """Convert a `TeamRoleAssignmentSchema` to a `RoleAssignmentModel`.

        Returns:
            The converted `RoleAssignmentModel`.
        """
        return RoleAssignmentResponseModel(
            id=self.id,
            project=self.project.to_model() if self.project else None,
            # NOTE(review): the team model is passed via the `user` field —
            # presumably `RoleAssignmentResponseModel` accepts either
            # principal kind here; confirm against the model definition.
            user=self.team.to_model(_block_recursion=True),
            role=self.role.to_model(),
            created=self.created,
            updated=self.updated,
        )
from_request(role_assignment) classmethod

Create a TeamRoleAssignmentSchema from a RoleAssignmentRequestModel.

Parameters:

Name Type Description Default
role_assignment RoleAssignmentRequestModel

The RoleAssignmentRequestModel from which to create the schema.

required

Returns:

Type Description
TeamRoleAssignmentSchema

The created TeamRoleAssignmentSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(
    cls, role_assignment: RoleAssignmentRequestModel
) -> "TeamRoleAssignmentSchema":
    """Create a `TeamRoleAssignmentSchema` from a `RoleAssignmentRequestModel`.

    Args:
        role_assignment: The `RoleAssignmentRequestModel` from which to
            create the schema.

    Returns:
        The created `TeamRoleAssignmentSchema`.
    """
    # Map the request's referenced IDs onto the schema's foreign keys.
    schema_fields = {
        "role_id": role_assignment.role,
        "team_id": role_assignment.team,
        "project_id": role_assignment.project,
    }
    return cls(**schema_fields)
to_model(self)

Convert a TeamRoleAssignmentSchema to a RoleAssignmentModel.

Returns:

Type Description
RoleAssignmentResponseModel

The converted RoleAssignmentModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> RoleAssignmentResponseModel:
    """Convert a `TeamRoleAssignmentSchema` to a `RoleAssignmentModel`.

    Returns:
        The converted `RoleAssignmentModel`.
    """
    # No project on the row -> pass None through to the response model.
    project_model = self.project.to_model() if self.project else None
    # NOTE(review): the team model is passed via the `user` field —
    # presumably the response model accepts either principal; confirm.
    return RoleAssignmentResponseModel(
        id=self.id,
        project=project_model,
        user=self.team.to_model(_block_recursion=True),
        role=self.role.to_model(),
        created=self.created,
        updated=self.updated,
    )
UserRoleAssignmentSchema (BaseSchema) pydantic-model

SQL Model for assigning roles to users for a given project.

Source code in zenml/zen_stores/schemas/role_schemas.py
class UserRoleAssignmentSchema(BaseSchema, table=True):
    """SQL Model for assigning roles to users for a given project."""

    __tablename__ = "user_role_assignment"

    # Surrogate primary key for the assignment row.
    id: UUID = Field(primary_key=True, default_factory=uuid4)
    # Deleting the referenced role removes the assignment as well.
    role_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=RoleSchema.__tablename__,
        source_column="role_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Deleting the referenced user removes the assignment as well.
    user_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    # Optional: presumably a missing project means a project-agnostic
    # assignment — TODO confirm against the store logic.
    project_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=True,
    )

    role: RoleSchema = Relationship(back_populates="user_role_assignments")
    user: "UserSchema" = Relationship(back_populates="assigned_roles")
    project: Optional["ProjectSchema"] = Relationship(
        back_populates="user_role_assignments"
    )

    @classmethod
    def from_request(
        cls, role_assignment: RoleAssignmentRequestModel
    ) -> "UserRoleAssignmentSchema":
        """Create a `UserRoleAssignmentSchema` from a `RoleAssignmentRequestModel`.

        Args:
            role_assignment: The `RoleAssignmentRequestModel` from which to
                create the schema.

        Returns:
            The created `UserRoleAssignmentSchema`.
        """
        # The request carries the referenced IDs directly.
        return cls(
            role_id=role_assignment.role,
            user_id=role_assignment.user,
            project_id=role_assignment.project,
        )

    def to_model(self) -> RoleAssignmentResponseModel:
        """Convert a `UserRoleAssignmentSchema` to a `RoleAssignmentModel`.

        Returns:
            The converted `RoleAssignmentModel`.
        """
        return RoleAssignmentResponseModel(
            id=self.id,
            project=self.project.to_model() if self.project else None,
            user=self.user.to_model(_block_recursion=True),
            role=self.role.to_model(),
            created=self.created,
            updated=self.updated,
        )
from_request(role_assignment) classmethod

Create a UserRoleAssignmentSchema from a RoleAssignmentRequestModel.

Parameters:

Name Type Description Default
role_assignment RoleAssignmentRequestModel

The RoleAssignmentRequestModel from which to create the schema.

required

Returns:

Type Description
UserRoleAssignmentSchema

The created UserRoleAssignmentSchema.

Source code in zenml/zen_stores/schemas/role_schemas.py
@classmethod
def from_request(
    cls, role_assignment: RoleAssignmentRequestModel
) -> "UserRoleAssignmentSchema":
    """Create a `UserRoleAssignmentSchema` from a `RoleAssignmentRequestModel`.

    Args:
        role_assignment: The `RoleAssignmentRequestModel` from which to
            create the schema.

    Returns:
        The created `UserRoleAssignmentSchema`.
    """
    # Map the request's referenced IDs onto the schema's foreign keys.
    schema_fields = {
        "role_id": role_assignment.role,
        "user_id": role_assignment.user,
        "project_id": role_assignment.project,
    }
    return cls(**schema_fields)
to_model(self)

Convert a UserRoleAssignmentSchema to a RoleAssignmentModel.

Returns:

Type Description
RoleAssignmentResponseModel

The converted RoleAssignmentModel.

Source code in zenml/zen_stores/schemas/role_schemas.py
def to_model(self) -> RoleAssignmentResponseModel:
    """Convert a `UserRoleAssignmentSchema` to a `RoleAssignmentModel`.

    Returns:
        The converted `RoleAssignmentModel`.
    """
    # No project on the row -> pass None through to the response model.
    project_model = self.project.to_model() if self.project else None
    return RoleAssignmentResponseModel(
        id=self.id,
        project=project_model,
        user=self.user.to_model(_block_recursion=True),
        role=self.role.to_model(),
        created=self.created,
        updated=self.updated,
    )

schema_utils

Utility functions for SQLModel schemas.

build_foreign_key_field(source, target, source_column, target_column, ondelete, nullable, **sa_column_kwargs)

Build a SQLModel foreign key field.

Parameters:

Name Type Description Default
source str

Source table name.

required
target str

Target table name.

required
source_column str

Source column name.

required
target_column str

Target column name.

required
ondelete str

On delete behavior.

required
nullable bool

Whether the field is nullable.

required
**sa_column_kwargs Any

Keyword arguments for the SQLAlchemy column.

{}

Returns:

Type Description
Any

SQLModel foreign key field.

Exceptions:

Type Description
ValueError

If the ondelete and nullable arguments are not compatible.

Source code in zenml/zen_stores/schemas/schema_utils.py
def build_foreign_key_field(
    source: str,
    target: str,
    source_column: str,
    target_column: str,
    ondelete: str,
    nullable: bool,
    **sa_column_kwargs: Any,
) -> Any:
    """Build a SQLModel foreign key field.

    Args:
        source: Source table name.
        target: Target table name.
        source_column: Source column name.
        target_column: Target column name.
        ondelete: On delete behavior.
        nullable: Whether the field is nullable.
        **sa_column_kwargs: Keyword arguments for the SQLAlchemy column.

    Returns:
        SQLModel foreign key field.

    Raises:
        ValueError: If the ondelete and nullable arguments are not compatible.
    """
    # "SET NULL" only makes sense for a column that can hold NULL.
    if ondelete == "SET NULL" and not nullable:
        raise ValueError(
            "Cannot set ondelete to SET NULL if the field is not nullable."
        )
    # Name the constraint explicitly so migrations can refer to it.
    foreign_key = ForeignKey(
        f"{target}.{target_column}",
        name=foreign_key_constraint_name(
            source=source,
            target=target,
            source_column=source_column,
        ),
        ondelete=ondelete,
    )
    return Field(
        sa_column=Column(foreign_key, nullable=nullable, **sa_column_kwargs),
    )
foreign_key_constraint_name(source, target, source_column)

Defines the name of a foreign key constraint.

For simplicity, we use the naming convention used by alembic here: https://alembic.sqlalchemy.org/en/latest/batch.html#dropping-unnamed-or-named-foreign-key-constraints.

Parameters:

Name Type Description Default
source str

Source table name.

required
target str

Target table name.

required
source_column str

Source column name.

required

Returns:

Type Description
str

Name of the foreign key constraint.

Source code in zenml/zen_stores/schemas/schema_utils.py
def foreign_key_constraint_name(
    source: str, target: str, source_column: str
) -> str:
    """Defines the name of a foreign key constraint.

    For simplicity, we use the naming convention used by alembic here:
    https://alembic.sqlalchemy.org/en/latest/batch.html#dropping-unnamed-or-named-foreign-key-constraints.

    Args:
        source: Source table name.
        target: Target table name.
        source_column: Source column name.

    Returns:
        Name of the foreign key constraint.
    """
    # Alembic convention: fk_<source-table>_<source-column>_<target-table>.
    return "_".join(("fk", source, source_column, target))

stack_schemas

SQL Model Implementations for Stacks.

StackCompositionSchema (SQLModel) pydantic-model

SQL Model for stack definitions.

Join table between Stacks and StackComponents.

Source code in zenml/zen_stores/schemas/stack_schemas.py
class StackCompositionSchema(SQLModel, table=True):
    """SQL Model for stack definitions.

    Join table between Stacks and StackComponents.
    """

    __tablename__ = "stack_composition"

    # Composite primary key: one row per (stack, component) pairing.
    # Deleting either side removes the composition row via CASCADE.
    stack_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="stack",  # TODO: how to reference `StackSchema.__tablename__`?
        source_column="stack_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    component_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="stack_component",  # TODO: how to reference `StackComponentSchema.__tablename__`?
        source_column="component_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
StackSchema (ShareableSchema) pydantic-model

SQL Model for stacks.

Source code in zenml/zen_stores/schemas/stack_schemas.py
class StackSchema(ShareableSchema, table=True):
    """SQL Model for stacks."""

    __tablename__ = "stack"

    # Deleting the project cascades and deletes its stacks.
    project_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=ProjectSchema.__tablename__,
        source_column="project_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    project: "ProjectSchema" = Relationship(back_populates="stacks")

    # Deleting the owning user keeps the stack but nulls out the owner.
    user_id: Optional[UUID] = build_foreign_key_field(
        source=__tablename__,
        target=UserSchema.__tablename__,
        source_column="user_id",
        target_column="id",
        ondelete="SET NULL",
        nullable=True,
    )
    user: "UserSchema" = Relationship(back_populates="stacks")

    # Many-to-many with components through the composition join table.
    components: List["StackComponentSchema"] = Relationship(
        back_populates="stacks",
        link_model=StackCompositionSchema,
    )
    runs: List["PipelineRunSchema"] = Relationship(back_populates="stack")

    def update(
        self,
        stack_update: "StackUpdateModel",
        components: List["StackComponentSchema"],
    ) -> "StackSchema":
        """Updates a stack schema with a stack update model.

        Args:
            stack_update: `StackUpdateModel` to update the stack with.
            components: List of `StackComponentSchema` to update the stack with.

        Returns:
            The updated StackSchema.
        """
        for field, value in stack_update.dict(exclude_unset=True).items():
            if field == "components":
                # Component schemas are resolved by the caller and swapped in.
                self.components = components

            elif field == "user":
                # NOTE(review): plain asserts are stripped under `python -O`;
                # these invariants would then go unchecked — confirm intended.
                assert self.user_id == value

            elif field == "project":
                assert self.project_id == value

            else:
                setattr(self, field, value)

        self.updated = datetime.now()
        return self

    def to_model(self) -> "StackResponseModel":
        """Converts the schema to a model.

        Returns:
            The converted model.
        """
        return StackResponseModel(
            id=self.id,
            name=self.name,
            # NOTE(review): `user_id` is nullable (SET NULL on owner
            # deletion), so `self.user` may be None here — confirm safety.
            user=self.user.to_model(),
            project=self.project.to_model(),
            is_shared=self.is_shared,
            # NOTE(review): if several components share a type, later ones
            # overwrite earlier entries in this mapping — confirm intended.
            components={c.type: [c.to_model()] for c in self.components},
            created=self.created,
            updated=self.updated,
        )
to_model(self)

Converts the schema to a model.

Returns:

Type Description
StackResponseModel

The converted model.

Source code in zenml/zen_stores/schemas/stack_schemas.py
def to_model(self) -> "StackResponseModel":
    """Converts the schema to a model.

    Returns:
        The converted model.
    """
    # NOTE(review): if several components share a type, later ones
    # overwrite earlier entries in this mapping — confirm intended.
    component_map = {c.type: [c.to_model()] for c in self.components}
    return StackResponseModel(
        id=self.id,
        name=self.name,
        user=self.user.to_model(),
        project=self.project.to_model(),
        is_shared=self.is_shared,
        components=component_map,
        created=self.created,
        updated=self.updated,
    )
update(self, stack_update, components)

Updates a stack schema with a stack update model.

Parameters:

Name Type Description Default
stack_update StackUpdateModel

StackUpdateModel to update the stack with.

required
components List[StackComponentSchema]

List of StackComponentSchema to update the stack with.

required

Returns:

Type Description
StackSchema

The updated StackSchema.

Source code in zenml/zen_stores/schemas/stack_schemas.py
def update(
    self,
    stack_update: "StackUpdateModel",
    components: List["StackComponentSchema"],
) -> "StackSchema":
    """Updates a stack schema with a stack update model.

    Args:
        stack_update: `StackUpdateModel` to update the stack with.
        components: List of `StackComponentSchema` to update the stack with.

    Returns:
        The updated StackSchema.
    """
    changes = stack_update.dict(exclude_unset=True)
    for field, value in changes.items():
        if field == "components":
            # Component schemas are resolved by the caller and swapped in.
            self.components = components
        elif field in ("user", "project"):
            # Ownership and scope are immutable; the update must agree
            # with the stored foreign keys.
            assert getattr(self, f"{field}_id") == value
        else:
            setattr(self, field, value)

    self.updated = datetime.now()
    return self

step_run_schemas

SQLModel implementation of step run tables.

StepRunInputArtifactSchema (SQLModel) pydantic-model

SQL Model that defines which artifacts are inputs to which step.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunInputArtifactSchema(SQLModel, table=True):
    """SQL Model that defines which artifacts are inputs to which step."""

    __tablename__ = "step_run_input_artifact"

    # Composite primary key: each (step, artifact) pair appears once.
    # Deleting either side removes the link row via CASCADE.
    step_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="step_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    artifact_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="artifacts",  # `ArtifactSchema.__tablename__`
        source_column="artifact_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    name: str  # Name of the input in the step
StepRunParentsSchema (SQLModel) pydantic-model

SQL Model that defines the order of steps.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunParentsSchema(SQLModel, table=True):
    """SQL Model that defines the order of steps."""

    __tablename__ = "step_run_parents"

    # Each row is one parent -> child edge between step runs; both
    # foreign keys target the step_run table, forming a self-join.
    parent_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="parent_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    child_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=StepRunSchema.__tablename__,
        source_column="child_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
StepRunSchema (NamedSchema) pydantic-model

SQL Model for steps of pipeline runs.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
class StepRunSchema(NamedSchema, table=True):
    """SQL Model for steps of pipeline runs."""

    __tablename__ = "step_run"

    # Deleting the pipeline run cascades and deletes its step runs.
    pipeline_run_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target=PipelineRunSchema.__tablename__,
        source_column="pipeline_run_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
    )
    status: ExecutionStatus
    entrypoint_name: str

    # Parameters and configuration are persisted as JSON text blobs;
    # see `from_request` (json.dumps) and `to_model` (json.loads).
    parameters: str = Field(sa_column=Column(TEXT, nullable=False))
    step_configuration: str = Field(sa_column=Column(TEXT, nullable=False))
    docstring: Optional[str] = Field(sa_column=Column(TEXT, nullable=True))
    num_outputs: Optional[int]

    # Presumably the step's ID in ML Metadata — TODO confirm.
    mlmd_id: Optional[int] = Field(default=None, nullable=True)

    @classmethod
    def from_request(cls, request: StepRunRequestModel) -> "StepRunSchema":
        """Create a step run schema from a step run request model.

        Args:
            request: The step run request model.

        Returns:
            The step run schema.
        """
        return cls(
            name=request.name,
            pipeline_run_id=request.pipeline_run_id,
            entrypoint_name=request.entrypoint_name,
            parameters=json.dumps(request.parameters),
            step_configuration=json.dumps(request.step_configuration),
            docstring=request.docstring,
            mlmd_id=request.mlmd_id,
            num_outputs=request.num_outputs,
            status=request.status,
        )

    def to_model(
        self,
        parent_step_ids: List[UUID],
        mlmd_parent_step_ids: List[int],
        input_artifacts: Dict[str, UUID],
    ) -> StepRunResponseModel:
        """Convert a `StepRunSchema` to a `StepRunModel`.

        Args:
            parent_step_ids: The parent step ids to link to the step.
            mlmd_parent_step_ids: The parent step ids in MLMD
            input_artifacts: The input artifacts to link to the step.

        Returns:
            The created StepRunModel.
        """
        return StepRunResponseModel(
            id=self.id,
            name=self.name,
            pipeline_run_id=self.pipeline_run_id,
            parent_step_ids=parent_step_ids,
            entrypoint_name=self.entrypoint_name,
            parameters=json.loads(self.parameters),
            step_configuration=json.loads(self.step_configuration),
            docstring=self.docstring,
            status=self.status,
            mlmd_id=self.mlmd_id,
            mlmd_parent_step_ids=mlmd_parent_step_ids,
            created=self.created,
            updated=self.updated,
            input_artifacts=input_artifacts,
            num_outputs=self.num_outputs,
        )

    def update(self, step_update: StepRunUpdateModel) -> "StepRunSchema":
        """Update a step run schema with a step run update model.

        Args:
            step_update: The step run update model.

        Returns:
            The updated step run schema.
        """
        # For steps only the execution status is mutable.
        if "status" in step_update.__fields_set__ and step_update.status:
            self.status = step_update.status

        self.updated = datetime.now()

        return self
from_request(request) classmethod

Create a step run schema from a step run request model.

Parameters:

Name Type Description Default
request StepRunRequestModel

The step run request model.

required

Returns:

Type Description
StepRunSchema

The step run schema.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
@classmethod
def from_request(cls, request: StepRunRequestModel) -> "StepRunSchema":
    """Create a step run schema from a step run request model.

    Args:
        request: The step run request model.

    Returns:
        The step run schema.
    """
    # Parameters and step configuration are persisted as JSON strings.
    schema_fields = {
        "name": request.name,
        "pipeline_run_id": request.pipeline_run_id,
        "entrypoint_name": request.entrypoint_name,
        "parameters": json.dumps(request.parameters),
        "step_configuration": json.dumps(request.step_configuration),
        "docstring": request.docstring,
        "mlmd_id": request.mlmd_id,
        "num_outputs": request.num_outputs,
        "status": request.status,
    }
    return cls(**schema_fields)
to_model(self, parent_step_ids, mlmd_parent_step_ids, input_artifacts)

Convert a StepRunSchema to a StepRunModel.

Parameters:

Name Type Description Default
parent_step_ids List[uuid.UUID]

The parent step ids to link to the step.

required
mlmd_parent_step_ids List[int]

The parent step ids in MLMD

required
input_artifacts Dict[str, uuid.UUID]

The input artifacts to link to the step.

required

Returns:

Type Description
StepRunResponseModel

The created StepRunModel.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
def to_model(
    self,
    parent_step_ids: List[UUID],
    mlmd_parent_step_ids: List[int],
    input_artifacts: Dict[str, UUID],
) -> StepRunResponseModel:
    """Convert a `StepRunSchema` to a `StepRunModel`.

    Args:
        parent_step_ids: The parent step ids to link to the step.
        mlmd_parent_step_ids: The parent step ids in MLMD.
        input_artifacts: The input artifacts to link to the step.

    Returns:
        The created StepRunModel.
    """
    # Parameters and step configuration are stored as JSON strings in the
    # database and decoded again on the way out.
    response_fields = {
        "id": self.id,
        "name": self.name,
        "pipeline_run_id": self.pipeline_run_id,
        "parent_step_ids": parent_step_ids,
        "entrypoint_name": self.entrypoint_name,
        "parameters": json.loads(self.parameters),
        "step_configuration": json.loads(self.step_configuration),
        "docstring": self.docstring,
        "status": self.status,
        "mlmd_id": self.mlmd_id,
        "mlmd_parent_step_ids": mlmd_parent_step_ids,
        "created": self.created,
        "updated": self.updated,
        "input_artifacts": input_artifacts,
        "num_outputs": self.num_outputs,
    }
    return StepRunResponseModel(**response_fields)
update(self, step_update)

Update a step run schema with a step run update model.

Parameters:

Name Type Description Default
step_update StepRunUpdateModel

The step run update model.

required

Returns:

Type Description
StepRunSchema

The updated step run schema.

Source code in zenml/zen_stores/schemas/step_run_schemas.py
def update(self, step_update: StepRunUpdateModel) -> "StepRunSchema":
    """Update a step run schema with a step run update model.

    Args:
        step_update: The step run update model.

    Returns:
        The updated step run schema.
    """
    # For steps only the execution status is mutable.
    new_status = step_update.status
    if new_status and "status" in step_update.__fields_set__:
        self.status = new_status
    self.updated = datetime.now()
    return self

team_schemas

SQLModel implementation of team tables.

TeamAssignmentSchema (SQLModel) pydantic-model

SQL Model for team assignments.

Source code in zenml/zen_stores/schemas/team_schemas.py
class TeamAssignmentSchema(SQLModel, table=True):
    """SQL Model for team assignments."""

    # Association table for the many-to-many relationship between users and
    # teams. Both foreign keys form a composite primary key, and rows are
    # removed automatically when the referenced user or team is deleted.
    __tablename__ = "team_assignment"

    # ID of the assigned user; the row cascades on user deletion.
    user_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="user",  # TODO: how to reference `UserSchema.__tablename__`?
        source_column="user_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
    # ID of the team the user belongs to; the row cascades on team deletion.
    team_id: UUID = build_foreign_key_field(
        source=__tablename__,
        target="team",  # TODO: how to reference `TeamSchema.__tablename__`?
        source_column="team_id",
        target_column="id",
        ondelete="CASCADE",
        nullable=False,
        primary_key=True,
    )
TeamSchema (NamedSchema) pydantic-model

SQL Model for teams.

Source code in zenml/zen_stores/schemas/team_schemas.py
class TeamSchema(NamedSchema, table=True):
    """SQL Model for teams."""

    __tablename__ = "team"

    users: List["UserSchema"] = Relationship(
        back_populates="teams", link_model=TeamAssignmentSchema
    )
    assigned_roles: List["TeamRoleAssignmentSchema"] = Relationship(
        back_populates="team", sa_relationship_kwargs={"cascade": "delete"}
    )

    def update(self, team_update: TeamUpdateModel) -> "TeamSchema":
        """Update a `TeamSchema` with a `TeamUpdateModel`.

        Args:
            team_update: The `TeamUpdateModel` to update the schema with.

        Returns:
            The updated `TeamSchema`.
        """
        for field, value in team_update.dict(exclude_unset=True).items():
            # Team membership is managed through the assignment table and is
            # deliberately not copied over here.
            if field != "users":
                setattr(self, field, value)

        self.updated = datetime.now()
        return self

    def to_model(self, _block_recursion: bool = False) -> TeamResponseModel:
        """Convert a `TeamSchema` to a `TeamResponseModel`.

        Args:
            _block_recursion: Don't recursively fill attributes

        Returns:
            The converted `TeamResponseModel`.
        """
        if _block_recursion:
            members = []
        else:
            members = [
                u.to_model(_block_recursion=False) for u in self.users
            ]
        return TeamResponseModel(
            id=self.id,
            name=self.name,
            created=self.created,
            updated=self.updated,
            users=members,
        )
to_model(self, _block_recursion=False)

Convert a TeamSchema to a TeamResponseModel.

Parameters:

Name Type Description Default
_block_recursion bool

Don't recursively fill attributes

False

Returns:

Type Description
TeamResponseModel

The converted TeamResponseModel.

Source code in zenml/zen_stores/schemas/team_schemas.py
def to_model(self, _block_recursion: bool = False) -> TeamResponseModel:
    """Convert a `TeamSchema` to a `TeamResponseModel`.

    Args:
        _block_recursion: Don't recursively fill attributes

    Returns:
        The converted `TeamResponseModel`.
    """
    # When recursion is blocked the membership list stays empty, which
    # breaks the team <-> user reference cycle.
    member_models = (
        []
        if _block_recursion
        else [u.to_model(_block_recursion=False) for u in self.users]
    )
    return TeamResponseModel(
        id=self.id,
        name=self.name,
        created=self.created,
        updated=self.updated,
        users=member_models,
    )
update(self, team_update)

Update a TeamSchema with a TeamUpdateModel.

Parameters:

Name Type Description Default
team_update TeamUpdateModel

The TeamUpdateModel to update the schema with.

required

Returns:

Type Description
TeamSchema

The updated TeamSchema.

Source code in zenml/zen_stores/schemas/team_schemas.py
def update(self, team_update: TeamUpdateModel) -> "TeamSchema":
    """Update a `TeamSchema` with a `TeamUpdateModel`.

    Args:
        team_update: The `TeamUpdateModel` to update the schema with.

    Returns:
        The updated `TeamSchema`.
    """
    changed = team_update.dict(exclude_unset=True)
    for field, value in changed.items():
        # Membership lives in the assignment table, never set via setattr.
        if field == "users":
            continue
        setattr(self, field, value)

    self.updated = datetime.now()
    return self

user_schemas

SQLModel implementation of user tables.

UserSchema (NamedSchema) pydantic-model

SQL Model for users.

Source code in zenml/zen_stores/schemas/user_schemas.py
class UserSchema(NamedSchema, table=True):
    """SQL Model for users."""

    __tablename__ = "user"

    full_name: str
    email: Optional[str] = Field(nullable=True)
    active: bool
    password: Optional[str] = Field(nullable=True)
    activation_token: Optional[str] = Field(nullable=True)

    email_opted_in: Optional[bool] = Field(nullable=True)

    teams: List["TeamSchema"] = Relationship(
        back_populates="users", link_model=TeamAssignmentSchema
    )
    assigned_roles: List["UserRoleAssignmentSchema"] = Relationship(
        back_populates="user", sa_relationship_kwargs={"cascade": "delete"}
    )
    stacks: List["StackSchema"] = Relationship(
        back_populates="user",
    )
    components: List["StackComponentSchema"] = Relationship(
        back_populates="user",
    )
    flavors: List["FlavorSchema"] = Relationship(
        back_populates="user",
    )
    pipelines: List["PipelineSchema"] = Relationship(
        back_populates="user",
    )
    runs: List["PipelineRunSchema"] = Relationship(
        back_populates="user",
    )

    @classmethod
    def from_request(cls, model: UserRequestModel) -> "UserSchema":
        """Create a `UserSchema` from a `UserModel`.

        Args:
            model: The `UserModel` from which to create the schema.

        Returns:
            The created `UserSchema`.
        """
        # Secrets are stored hashed, never in plain text.
        hashed_password = model.create_hashed_password()
        hashed_token = model.create_hashed_activation_token()
        return cls(
            name=model.name,
            full_name=model.full_name,
            active=model.active,
            password=hashed_password,
            activation_token=hashed_token,
        )

    def update(self, user_update: UserUpdateModel) -> "UserSchema":
        """Update a `UserSchema` from a `UserUpdateModel`.

        Args:
            user_update: The `UserUpdateModel` from which to update the schema.

        Returns:
            The updated `UserSchema`.
        """
        for field, value in user_update.dict(exclude_unset=True).items():
            # Secret fields are replaced by their hashed representation;
            # everything else is copied over verbatim.
            if field == "password":
                value = user_update.create_hashed_password()
            elif field == "activation_token":
                value = user_update.create_hashed_activation_token()
            setattr(self, field, value)

        self.updated = datetime.now()
        return self

    def to_model(self, _block_recursion: bool = False) -> UserResponseModel:
        """Convert a `UserSchema` to a `UserResponseModel`.

        Args:
            _block_recursion: Don't recursively fill attributes

        Returns:
            The converted `UserResponseModel`.
        """
        common = dict(
            id=self.id,
            name=self.name,
            active=self.active,
            email_opted_in=self.email_opted_in,
            full_name=self.full_name,
            created=self.created,
            updated=self.updated,
        )
        if _block_recursion:
            # Leave `teams` unset to break the user <-> team cycle.
            return UserResponseModel(**common)
        return UserResponseModel(
            teams=[t.to_model(_block_recursion=True) for t in self.teams],
            **common,
        )
from_request(model) classmethod

Create a UserSchema from a UserModel.

Parameters:

Name Type Description Default
model UserRequestModel

The UserModel from which to create the schema.

required

Returns:

Type Description
UserSchema

The created UserSchema.

Source code in zenml/zen_stores/schemas/user_schemas.py
@classmethod
def from_request(cls, model: UserRequestModel) -> "UserSchema":
    """Create a `UserSchema` from a `UserModel`.

    Args:
        model: The `UserModel` from which to create the schema.

    Returns:
        The created `UserSchema`.
    """
    # Secrets are stored hashed, never in plain text.
    hashed_password = model.create_hashed_password()
    hashed_token = model.create_hashed_activation_token()
    return cls(
        name=model.name,
        full_name=model.full_name,
        active=model.active,
        password=hashed_password,
        activation_token=hashed_token,
    )
to_model(self, _block_recursion=False)

Convert a UserSchema to a UserResponseModel.

Parameters:

Name Type Description Default
_block_recursion bool

Don't recursively fill attributes

False

Returns:

Type Description
UserResponseModel

The converted UserResponseModel.

Source code in zenml/zen_stores/schemas/user_schemas.py
def to_model(self, _block_recursion: bool = False) -> UserResponseModel:
    """Convert a `UserSchema` to a `UserResponseModel`.

    Args:
        _block_recursion: Don't recursively fill attributes

    Returns:
        The converted `UserResponseModel`.
    """
    common = dict(
        id=self.id,
        name=self.name,
        active=self.active,
        email_opted_in=self.email_opted_in,
        full_name=self.full_name,
        created=self.created,
        updated=self.updated,
    )
    if _block_recursion:
        # Leave `teams` unset to break the user <-> team cycle.
        return UserResponseModel(**common)
    return UserResponseModel(
        teams=[t.to_model(_block_recursion=True) for t in self.teams],
        **common,
    )
update(self, user_update)

Update a UserSchema from a UserUpdateModel.

Parameters:

Name Type Description Default
user_update UserUpdateModel

The UserUpdateModel from which to update the schema.

required

Returns:

Type Description
UserSchema

The updated UserSchema.

Source code in zenml/zen_stores/schemas/user_schemas.py
def update(self, user_update: UserUpdateModel) -> "UserSchema":
    """Update a `UserSchema` from a `UserUpdateModel`.

    Args:
        user_update: The `UserUpdateModel` from which to update the schema.

    Returns:
        The updated `UserSchema`.
    """
    for field, value in user_update.dict(exclude_unset=True).items():
        # Secret fields are replaced by their hashed representation;
        # everything else is copied over verbatim.
        if field == "password":
            value = user_update.create_hashed_password()
        elif field == "activation_token":
            value = user_update.create_hashed_activation_token()
        setattr(self, field, value)

    self.updated = datetime.now()
    return self

sql_zen_store

SQL Zen Store implementation.

SQLDatabaseDriver (StrEnum)

SQL database drivers supported by the SQL ZenML store.

Source code in zenml/zen_stores/sql_zen_store.py
class SQLDatabaseDriver(StrEnum):
    """SQL database drivers supported by the SQL ZenML store."""

    # The string value doubles as the driver name used in SQLAlchemy
    # database URLs (e.g. "mysql://..." or "sqlite://...").
    MYSQL = "mysql"
    SQLITE = "sqlite"

SqlZenStore (BaseZenStore) pydantic-model

Store Implementation that uses SQL database backend.

Attributes:

Name Type Description
config SqlZenStoreConfiguration

The configuration of the SQL ZenML store.

skip_migrations

Whether to skip migrations when initializing the store.

TYPE ClassVar[zenml.enums.StoreType]

The type of the store.

CONFIG_TYPE ClassVar[Type[zenml.config.store_config.StoreConfiguration]]

The type of the store configuration.

_engine

The SQLAlchemy engine.

_metadata_store

The metadata store.

_sync_lock

A thread mutex used to ensure thread safety during the pipeline run synchronization.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStore(BaseZenStore):
    """Store Implementation that uses SQL database backend.

    Attributes:
        config: The configuration of the SQL ZenML store.
        skip_migrations: Whether to skip migrations when initializing the store.
        TYPE: The type of the store.
        CONFIG_TYPE: The type of the store configuration.
        _engine: The SQLAlchemy engine.
        _metadata_store: The metadata store.
        _sync_lock: A thread mutex used to ensure thread safety during the
            pipeline run synchronization.
    """

    config: SqlZenStoreConfiguration
    skip_migrations: bool = False
    TYPE: ClassVar[StoreType] = StoreType.SQL
    CONFIG_TYPE: ClassVar[Type[StoreConfiguration]] = SqlZenStoreConfiguration

    _engine: Optional[Engine] = None
    _metadata_store: Optional["MetadataStore"] = None
    _highest_synced_run_mlmd_id: int = 0
    _alembic: Optional[Alembic] = None
    _sync_lock: Optional[Lock] = None

    @property
    def engine(self) -> Engine:
        """The SQLAlchemy engine.

        Returns:
            The SQLAlchemy engine.

        Raises:
            ValueError: If the store is not initialized.
        """
        # `_engine` is only populated during `_initialize`.
        engine = self._engine
        if not engine:
            raise ValueError("Store not initialized")
        return engine

    @property
    def metadata_store(self) -> "MetadataStore":
        """The metadata store.

        Returns:
            The metadata store.

        Raises:
            ValueError: If the store is not initialized.
        """
        # `_metadata_store` is only populated during `_initialize`.
        store = self._metadata_store
        if not store:
            raise ValueError("Store not initialized")
        return store

    @property
    def runs_inside_server(self) -> bool:
        """Whether the store is running inside a server.

        Returns:
            Whether the store is running inside a server.
        """
        # The deployment-type environment variable is only present when the
        # store runs inside a ZenML server process. Idiom fix: return the
        # membership test directly instead of `if ...: return True` /
        # `return False`.
        return ENV_ZENML_SERVER_DEPLOYMENT_TYPE in os.environ

    @property
    def alembic(self) -> Alembic:
        """The Alembic wrapper.

        Returns:
            The Alembic wrapper.

        Raises:
            ValueError: If the store is not initialized.
        """
        # `_alembic` is only populated during `_initialize`.
        wrapper = self._alembic
        if not wrapper:
            raise ValueError("Store not initialized")
        return wrapper

    @property
    def sync_lock(self) -> Lock:
        """The mutex used to synchronize pipeline runs.

        Returns:
            The mutex used to synchronize pipeline runs.

        Raises:
            ValueError: If the store is not initialized.
        """
        # `_sync_lock` is only populated during `_initialize`.
        lock = self._sync_lock
        if not lock:
            raise ValueError("Store not initialized")
        return lock

    # ====================================
    # ZenML Store interface implementation
    # ====================================

    # --------------------------------
    # Initialization and configuration
    # --------------------------------

    def _initialize(self) -> None:
        """Initialize the SQL store.

        Sets up the metadata store, the SQLAlchemy engine and the Alembic
        wrapper, runs pending database migrations (unless skipped) and
        creates the pipeline run synchronization lock.
        """
        from zenml.zen_stores.metadata_store import MetadataStore

        logger.debug("Initializing SqlZenStore at %s", self.config.url)

        metadata_config = self.config.get_metadata_config()
        self._metadata_store = MetadataStore(config=metadata_config)

        url, connect_args, engine_args = self.config.get_sqlmodel_config()
        self._engine = create_engine(
            url=url, connect_args=connect_args, **engine_args
        )
        # The Alembic wrapper needs the engine, so it is created after it.
        self._alembic = Alembic(self.engine)
        # Migrations can be skipped explicitly via the `skip_migrations`
        # flag or globally via an environment variable.
        if (
            not self.skip_migrations
            and ENV_ZENML_DISABLE_DATABASE_MIGRATION not in os.environ
        ):
            self.migrate_database()

        self._sync_lock = Lock()

    def migrate_database(self) -> None:
        """Migrate the database to the head as defined by the python package."""
        alembic_logger = logging.getLogger("alembic")

        # Drop all handlers alembic may have attached so far; iterate over a
        # copy because removal mutates the handler list.
        for existing_handler in list(alembic_logger.handlers):
            alembic_logger.removeHandler(existing_handler)

        # suppress alembic info logging if the zenml logging level is not debug
        if get_logging_level() == LoggingLevels.DEBUG:
            alembic_logger.setLevel(logging.DEBUG)
        else:
            alembic_logger.setLevel(logging.WARNING)

        alembic_logger.addHandler(get_console_handler())

        # We need to account for 3 distinct cases here:
        # 1. the database is completely empty (not initialized)
        # 2. the database is not empty, but has never been migrated with
        #    alembic before (i.e. was created with SQLModel back when alembic
        #    wasn't used)
        # 3. the database is not empty and has been migrated with alembic
        #    before
        revisions = self.alembic.current_revisions()
        if revisions:
            # Case 3: the database has been migrated with alembic before.
            # Just upgrade to the latest revision.
            if len(revisions) > 1:
                logger.warning(
                    "The ZenML database has more than one migration head "
                    "revision. This is not expected and might indicate a "
                    "database migration problem. Please raise an issue on "
                    "GitHub if you encounter this."
                )
            self.alembic.upgrade()
        elif self.alembic.db_is_empty():
            # Case 1: the database is empty. We can just create the tables
            # from scratch with alembic.
            self.alembic.upgrade()
        else:
            # Case 2: the database is not empty, but has never been migrated
            # with alembic before. We need to create the alembic version
            # table, initialize it with the first revision where we
            # introduced alembic and then upgrade to the latest revision.
            self.alembic.stamp(ZENML_ALEMBIC_START_REVISION)
            self.alembic.upgrade()

    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.
        """
        info = super().get_store_info()
        # Derive the database type from the driver part of the SQL URL.
        driver = make_url(self.config.url).drivername
        info.database_type = ServerDatabaseType(driver)
        return info

    # ------------
    # TFX Metadata
    # ------------

    def get_metadata_config(
        self, expand_certs: bool = False
    ) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
        """Get the TFX metadata config of this ZenStore.

        Args:
            expand_certs: Whether to expand the certificate paths in the
                connection config to their value.

        Returns:
            The TFX metadata config of this ZenStore.
        """
        from ml_metadata.proto.metadata_store_pb2 import (
            MetadataStoreClientConfig,
        )

        config = self.config
        # Without a gRPC metadata service configured, hand out the direct
        # SQL connection config.
        if not config.grpc_metadata_host:
            return config.get_metadata_config(expand_certs=expand_certs)

        # A gRPC metadata store connection configuration is present, so
        # advertise it to the client instead of the direct SQL connection
        # config.
        client_config = MetadataStoreClientConfig()
        client_config.host = config.grpc_metadata_host
        client_config.port = config.grpc_metadata_port
        ssl = client_config.ssl_config
        if config.grpc_metadata_ssl_ca:
            ssl.custom_ca = config.grpc_metadata_ssl_ca
        if config.grpc_metadata_ssl_cert:
            ssl.server_cert = config.grpc_metadata_ssl_cert
        if config.grpc_metadata_ssl_key:
            ssl.client_key = config.grpc_metadata_ssl_key

        return client_config

    # ------
    # Stacks
    # ------

    @track(AnalyticsEvent.REGISTERED_STACK)
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Register a new stack.

        Args:
            stack: The stack to register.

        Returns:
            The registered stack.

        Raises:
            StackExistsError: If a stack with the same name already exists
                for the same user, or (for shared stacks) is already shared
                within the project.
        """
        with Session(self.engine) as session:
            # Enforce the (name, project, owner) uniqueness constraint.
            self._fail_if_stack_with_name_exists_for_user(
                stack=stack, session=session
            )

            # Shared stacks must additionally have a project-unique name.
            if stack.is_shared:
                self._fail_if_stack_with_name_already_shared(
                    stack=stack, session=session
                )

            # Get the Schemas of all components mentioned
            component_ids = [
                component_id
                for list_of_component_ids in stack.components.values()
                for component_id in list_of_component_ids
            ]
            filters = [
                (StackComponentSchema.id == component_id)
                for component_id in component_ids
            ]

            # Components referenced by the request but not found in the
            # database are silently dropped here.
            defined_components = session.exec(
                select(StackComponentSchema).where(or_(*filters))
            ).all()

            new_stack_schema = StackSchema(
                project_id=stack.project,
                user_id=stack.user,
                is_shared=stack.is_shared,
                name=stack.name,
                description=stack.description,
                components=defined_components,
            )

            session.add(new_stack_schema)
            session.commit()
            # Refresh so DB-generated fields (ID, timestamps) are populated.
            session.refresh(new_stack_schema)

            return new_stack_schema.to_model()

    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Get a stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.

        Raises:
            KeyError: if the stack doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(StackSchema).where(StackSchema.id == stack_id)
            ).first()
            if schema is None:
                raise KeyError(f"Stack with ID {stack_id} not found.")
            return schema.to_model()

    def list_stacks(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_id: Optional[UUID] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[StackResponseModel]:
        """List all stacks matching the given filter criteria.

        Args:
            project_name_or_id: ID or name of the Project containing the stack
            user_name_or_id: Optionally filter stacks by their owner
            component_id: Optionally filter for stacks that contain the
                          component
            name: Optionally filter stacks by their name
            is_shared: Optionally filter out stacks by whether they are shared
                or not

        Returns:
            A list of all stacks matching the filter criteria.
        """
        with Session(self.engine) as session:
            # Get a list of all stacks
            query = select(StackSchema)
            # Each provided filter narrows the query further; omitted
            # filters leave the query untouched.
            if project_name_or_id:
                # Resolve name-or-ID to the project schema first.
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                query = query.where(StackSchema.project_id == project.id)
            if user_name_or_id:
                user = self._get_user_schema(user_name_or_id, session=session)
                query = query.where(StackSchema.user_id == user.id)
            if component_id:
                # Join through the composition table to find stacks that
                # contain the given component.
                query = query.where(
                    StackCompositionSchema.stack_id == StackSchema.id
                ).where(StackCompositionSchema.component_id == component_id)
            if name:
                query = query.where(StackSchema.name == name)
            # Explicit `is None` check so that `is_shared=False` is not
            # mistaken for "no filter".
            if is_shared is not None:
                query = query.where(StackSchema.is_shared == is_shared)

            stacks = session.exec(query.order_by(StackSchema.name)).all()

            return [stack.to_model() for stack in stacks]

    @track(AnalyticsEvent.UPDATED_STACK)
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Update a stack.

        Args:
            stack_id: The ID of the stack update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.

        Raises:
            KeyError: if the stack doesn't exist.
            IllegalOperationError: if the stack is a default stack.
        """
        with Session(self.engine) as session:
            # Check if stack with the domain key (name, project, owner) already
            #  exists
            existing_stack = session.exec(
                select(StackSchema).where(StackSchema.id == stack_id)
            ).first()
            if existing_stack is None:
                # BUGFIX: the original f-string fragments concatenated to
                # "Found noexisting stack" (missing space between them).
                raise KeyError(
                    f"Unable to update stack with id '{stack_id}': Found no "
                    f"existing stack with this id."
                )
            if existing_stack.name == DEFAULT_STACK_NAME:
                raise IllegalOperationError(
                    "The default stack cannot be modified."
                )
            # In case of a renaming update, make sure no stack already exists
            # with that name
            if stack_update.name and existing_stack.name != stack_update.name:
                self._fail_if_stack_with_name_exists_for_user(
                    stack=stack_update, session=session
                )

            # Check if stack update makes the stack a shared stack. In that
            # case, check if a stack with the same name is already shared
            # within the project
            if stack_update.is_shared and not existing_stack.is_shared:
                self._fail_if_stack_with_name_already_shared(
                    stack=stack_update, session=session
                )

            # Resolve the component schemas referenced by the update, if any.
            components = []
            if stack_update.components:
                filters = [
                    (StackComponentSchema.id == component_id)
                    for list_of_component_ids in stack_update.components.values()
                    for component_id in list_of_component_ids
                ]
                components = session.exec(
                    select(StackComponentSchema).where(or_(*filters))
                ).all()

            existing_stack.update(
                stack_update=stack_update,
                components=components,
            )

            session.add(existing_stack)
            session.commit()
            # Refresh so the returned model reflects DB-side changes.
            session.refresh(existing_stack)

            return existing_stack.to_model()

    @track(AnalyticsEvent.DELETED_STACK)
    def delete_stack(self, stack_id: UUID) -> None:
        """Delete a stack.

        Args:
            stack_id: The ID of the stack to delete.

        Raises:
            KeyError: if the stack doesn't exist.
            IllegalOperationError: if the stack is a default stack.
        """
        with Session(self.engine) as session:
            try:
                stack = session.exec(
                    select(StackSchema).where(StackSchema.id == stack_id)
                ).one()
                if stack.name == DEFAULT_STACK_NAME:
                    raise IllegalOperationError(
                        "The default stack cannot be deleted."
                    )
                session.delete(stack)
            except NoResultFound as error:
                # BUGFIX: the bare `raise KeyError from error` produced an
                # empty, uninformative exception message.
                raise KeyError(
                    f"Unable to delete stack with ID '{stack_id}': No stack "
                    "with this ID found."
                ) from error

            session.commit()

    def _fail_if_stack_with_name_exists_for_user(
        self,
        stack: StackRequestModel,
        session: Session,
    ) -> None:
        """Raise an exception if a stack with same name exists for user.

        Args:
            stack: The Stack
            session: The Session

        Returns:
            None

        Raises:
            StackExistsError: If a Stack with the given name is already
                                       owned by the user
        """
        # A stack is identified by its (name, project, owner) domain key.
        existing_domain_stack = session.exec(
            select(StackSchema)
            .where(StackSchema.name == stack.name)
            .where(StackSchema.project_id == stack.project)
            .where(StackSchema.user_id == stack.user)
        ).first()
        if existing_domain_stack is not None:
            # Resolve project and user schemas only to build a readable
            # error message.
            project = self._get_project_schema(
                project_name_or_id=stack.project, session=session
            )
            user = self._get_user_schema(
                user_name_or_id=stack.user, session=session
            )
            raise StackExistsError(
                f"Unable to register stack with name "
                f"'{stack.name}': Found an existing stack with the same "
                f"name in the active project, '{project.name}', owned by the "
                f"same user, '{user.name}'."
            )
        return None

    def _fail_if_stack_with_name_already_shared(
        self,
        stack: StackRequestModel,
        session: Session,
    ) -> None:
        """Raise an exception if a stack with same name is already shared.

        Args:
            stack: The stack to check for.
            session: The database session to use for the check.

        Raises:
            StackExistsError: If a stack with the given name is already shared
                              by a user.
        """
        # Check if a stack with the same name is already shared within the
        # project. Filter explicitly on `is_shared == True` (previously this
        # compared against `stack.is_shared`, which would wrongly match
        # *private* stacks if the method were ever called for a non-shared
        # stack) — mirrors `_fail_if_component_with_name_type_already_shared`.
        existing_shared_stack = session.exec(
            select(StackSchema)
            .where(StackSchema.name == stack.name)
            .where(StackSchema.project_id == stack.project)
            .where(StackSchema.is_shared == True)  # noqa: E712
        ).first()
        if existing_shared_stack is not None:
            project = self._get_project_schema(
                project_name_or_id=stack.project, session=session
            )
            error_msg = (
                f"Unable to share stack with name '{stack.name}': Found an "
                f"existing shared stack with the same name in project "
                f"'{project.name}'"
            )
            # Shared stacks may or may not have an owner; mention them if so.
            if existing_shared_stack.user_id:
                owner_of_shared = self._get_user_schema(
                    existing_shared_stack.user_id, session=session
                )
                error_msg += f" owned by '{owner_of_shared.name}'."
            else:
                error_msg += ", which is currently not owned by any user."
            raise StackExistsError(error_msg)

    # ----------------
    # Stack components
    # ----------------

    @track(AnalyticsEvent.REGISTERED_STACK_COMPONENT)
    def create_stack_component(
        self,
        component: ComponentRequestModel,
    ) -> ComponentResponseModel:
        """Register a new stack component.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.
        """
        with Session(self.engine) as session:
            # Guard against a duplicate owned by the same user ...
            self._fail_if_component_with_name_type_exists_for_user(
                name=component.name,
                component_type=component.type,
                user_id=component.user,
                project_id=component.project,
                session=session,
            )

            # ... and, for shared components, against a duplicate already
            # shared within the project.
            if component.is_shared:
                self._fail_if_component_with_name_type_already_shared(
                    name=component.name,
                    component_type=component.type,
                    project_id=component.project,
                    session=session,
                )

            # The component configuration is persisted as base64-encoded
            # JSON.
            serialized_config = base64.b64encode(
                json.dumps(component.configuration).encode("utf-8")
            )
            component_schema = StackComponentSchema(
                name=component.name,
                project_id=component.project,
                user_id=component.user,
                is_shared=component.is_shared,
                type=component.type,
                flavor=component.flavor,
                configuration=serialized_config,
            )

            session.add(component_schema)
            session.commit()
            session.refresh(component_schema)

            return component_schema.to_model()

    def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
        """Get a stack component by ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """
        with Session(self.engine) as session:
            component_schema = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.id == component_id
                )
            ).first()

            # `first()` returns `None` on a miss, which we surface as a
            # `KeyError` for callers.
            if component_schema is None:
                raise KeyError(
                    f"Stack component with ID {component_id} not found."
                )

            return component_schema.to_model()

    def list_stack_components(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        type: Optional[str] = None,
        flavor_name: Optional[str] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[ComponentResponseModel]:
        """List all stack components matching the given filter criteria.

        Args:
            project_name_or_id: The ID or name of the Project to which the stack
                components belong
            user_name_or_id: Optionally filter stack components by the owner
            type: Optionally filter by type of stack component
            flavor_name: Optionally filter by flavor
            name: Optionally filter stack component by name
            is_shared: Optionally filter out stack component by whether they are
                shared or not

        Returns:
            A list of all stack components matching the filter criteria.
        """
        with Session(self.engine) as session:
            components_query = select(StackComponentSchema)

            # Narrow the query down by each filter that was provided.
            if project_name_or_id:
                project_schema = self._get_project_schema(
                    project_name_or_id, session=session
                )
                components_query = components_query.where(
                    StackComponentSchema.project_id == project_schema.id
                )
            if user_name_or_id:
                user_schema = self._get_user_schema(
                    user_name_or_id, session=session
                )
                components_query = components_query.where(
                    StackComponentSchema.user_id == user_schema.id
                )
            if type:
                components_query = components_query.where(
                    StackComponentSchema.type == type
                )
            if flavor_name:
                components_query = components_query.where(
                    StackComponentSchema.flavor == flavor_name
                )
            if name:
                components_query = components_query.where(
                    StackComponentSchema.name == name
                )
            # `is_shared` is tri-state: `None` means "don't filter on it".
            if is_shared is not None:
                components_query = components_query.where(
                    StackComponentSchema.is_shared == is_shared
                )

            return [
                schema.to_model()
                for schema in session.exec(components_query).all()
            ]

    @track(AnalyticsEvent.UPDATED_STACK_COMPONENT)
    def update_stack_component(
        self, component_id: UUID, component_update: ComponentUpdateModel
    ) -> ComponentResponseModel:
        """Update an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
            IllegalOperationError: if the stack component is a default stack
                component.
        """
        with Session(self.engine) as session:
            existing_component = session.exec(
                select(StackComponentSchema).where(
                    StackComponentSchema.id == component_id
                )
            ).first()

            if existing_component is None:
                # Fixed message: the f-string fragments previously
                # concatenated to "Found noexisting component".
                raise KeyError(
                    f"Unable to update component with id "
                    f"'{component_id}': Found no "
                    f"existing component with this id."
                )

            # The default orchestrator/artifact store are managed by ZenML
            # and must not be modified.
            if (
                existing_component.name == DEFAULT_STACK_COMPONENT_NAME
                and existing_component.type
                in [
                    StackComponentType.ORCHESTRATOR,
                    StackComponentType.ARTIFACT_STORE,
                ]
            ):
                raise IllegalOperationError(
                    f"The default {existing_component.type} cannot be modified."
                )

            # In case of a renaming update, make sure no component of the same
            # type already exists with that name for the same owner.
            if (
                component_update.name
                and existing_component.name != component_update.name
                and existing_component.user_id is not None
            ):
                self._fail_if_component_with_name_type_exists_for_user(
                    name=component_update.name,
                    component_type=existing_component.type,
                    project_id=existing_component.project_id,
                    user_id=existing_component.user_id,
                    session=session,
                )

            # If the update shares a previously private component, make sure
            # no component with the same name and type is already shared
            # within the project. (The original nested condition re-checked
            # `component_update.is_shared` redundantly.)
            if component_update.is_shared and not existing_component.is_shared:
                self._fail_if_component_with_name_type_already_shared(
                    name=component_update.name or existing_component.name,
                    component_type=existing_component.type,
                    project_id=existing_component.project_id,
                    session=session,
                )

            existing_component.update(component_update=component_update)
            session.add(existing_component)
            session.commit()

            # Refresh so generated/updated columns are reflected in the
            # returned model (consistent with `update_stack`/`update_user`).
            session.refresh(existing_component)
            return existing_component.to_model()

    @track(AnalyticsEvent.DELETED_STACK_COMPONENT)
    def delete_stack_component(self, component_id: UUID) -> None:
        """Delete a stack component.

        Args:
            component_id: The id of the stack component to delete.

        Raises:
            KeyError: if the stack component doesn't exist.
            IllegalOperationError: if the stack component is part of one or
                more stacks, or if it's a default stack component.
        """
        with Session(self.engine) as session:
            try:
                stack_component = session.exec(
                    select(StackComponentSchema).where(
                        StackComponentSchema.id == component_id
                    )
                ).one()
                # The default orchestrator/artifact store are managed by
                # ZenML and must not be removed.
                if (
                    stack_component.name == DEFAULT_STACK_COMPONENT_NAME
                    and stack_component.type
                    in [
                        StackComponentType.ORCHESTRATOR,
                        StackComponentType.ARTIFACT_STORE,
                    ]
                ):
                    raise IllegalOperationError(
                        f"The default {stack_component.type} cannot be deleted."
                    )

                if len(stack_component.stacks) > 0:
                    # Fixed message: closed the unbalanced backtick around
                    # the component type.
                    raise IllegalOperationError(
                        f"Stack Component `{stack_component.name}` of type "
                        f"`{stack_component.type}` cannot be "
                        f"deleted as it is part of "
                        f"{len(stack_component.stacks)} stacks. "
                        f"Before deleting this stack "
                        f"component, make sure to remove it "
                        f"from all stacks."
                    )
                else:
                    session.delete(stack_component)
            except NoResultFound as error:
                # Include the offending ID so callers can tell which lookup
                # failed (the bare `KeyError` carried no context).
                raise KeyError(
                    f"Stack component with ID {component_id} not found."
                ) from error

            session.commit()

    @staticmethod
    def _fail_if_component_with_name_type_exists_for_user(
        name: str,
        component_type: StackComponentType,
        project_id: UUID,
        user_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a Component with same name/type exists.

        Args:
            name: The name of the component
            component_type: The type of the component
            project_id: The ID of the project
            user_id: The ID of the user
            session: The Session

        Raises:
            StackComponentExistsError: If a component with the given name and
                                       type is already owned by the user
        """
        # Check if component with the same domain key (name, type, project,
        # owner) already exists
        existing_domain_component = session.exec(
            select(StackComponentSchema)
            .where(StackComponentSchema.name == name)
            .where(StackComponentSchema.project_id == project_id)
            .where(StackComponentSchema.user_id == user_id)
            .where(StackComponentSchema.type == component_type)
        ).first()
        if existing_domain_component is not None:
            # Fixed message: removed the doubled space before "project".
            raise StackComponentExistsError(
                f"Unable to register '{component_type.value}' component "
                f"with name '{name}': Found an existing "
                f"component with the same name and type in the same "
                f"project, '{existing_domain_component.project.name}', "
                f"owned by the same user, "
                f"'{existing_domain_component.user.name}'."
            )

    @staticmethod
    def _fail_if_component_with_name_type_already_shared(
        name: str,
        component_type: StackComponentType,
        project_id: UUID,
        session: Session,
    ) -> None:
        """Raise an exception if a Component with same name/type already shared.

        Args:
            name: The name of the component
            component_type: The type of the component
            project_id: The ID of the project
            session: The Session

        Raises:
            StackComponentExistsError: If a component with the given name and
                type is already shared by a user
        """
        # Check if component with the same name, type is already shared
        # within the project
        existing_shared_component = session.exec(
            select(StackComponentSchema)
            .where(StackComponentSchema.name == name)
            .where(StackComponentSchema.project_id == project_id)
            .where(StackComponentSchema.type == component_type)
            .where(StackComponentSchema.is_shared == True)  # noqa: E712
        ).first()
        if existing_shared_component is not None:
            # Fixed message: "Unable to shared" -> "Unable to share".
            raise StackComponentExistsError(
                f"Unable to share component of type '{component_type.value}' "
                f"with name '{name}': Found an existing shared "
                f"component with the same name and type in project "
                f"'{project_id}'."
            )

    # -----------------------
    # Stack component flavors
    # -----------------------

    @track(AnalyticsEvent.CREATED_FLAVOR)
    def create_flavor(self, flavor: FlavorRequestModel) -> FlavorResponseModel:
        """Creates a new stack component flavor.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.

        Raises:
            EntityExistsError: If a flavor with the same name and type
                is already owned by this user in this project.
        """
        with Session(self.engine) as session:
            # Check if flavor with the same domain key (name, type, project,
            # owner) already exists
            existing_flavor = session.exec(
                select(FlavorSchema)
                .where(FlavorSchema.name == flavor.name)
                .where(FlavorSchema.type == flavor.type)
                .where(FlavorSchema.project_id == flavor.project)
                .where(FlavorSchema.user_id == flavor.user)
            ).first()

            if existing_flavor is not None:
                raise EntityExistsError(
                    f"Unable to register '{flavor.type.value}' flavor "
                    f"with name '{flavor.name}': Found an existing "
                    f"flavor with the same name and type in the same "
                    f"'{flavor.project}' project owned by the same "
                    f"'{flavor.user}' user."
                )

            new_flavor = FlavorSchema(
                name=flavor.name,
                type=flavor.type,
                source=flavor.source,
                config_schema=flavor.config_schema,
                integration=flavor.integration,
                project_id=flavor.project,
                user_id=flavor.user,
            )
            session.add(new_flavor)
            session.commit()

            # Refresh so DB-generated columns are populated before converting
            # to the response model (consistent with `create_stack_component`).
            session.refresh(new_flavor)

            return new_flavor.to_model()

    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Get a flavor by ID.

        Args:
            flavor_id: The ID of the flavor to fetch.

        Returns:
            The stack component flavor.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """
        with Session(self.engine) as session:
            flavor_schema = session.exec(
                select(FlavorSchema).where(FlavorSchema.id == flavor_id)
            ).first()
            # `first()` returns `None` on a miss, surfaced as `KeyError`.
            if flavor_schema is None:
                raise KeyError(f"Flavor with ID {flavor_id} not found.")
            return flavor_schema.to_model()

    def list_flavors(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_type: Optional[StackComponentType] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[FlavorResponseModel]:
        """List all stack component flavors matching the given filter criteria.

        Args:
            project_name_or_id: Optionally filter by the Project to which the
                component flavors belong
            user_name_or_id: Optionally filter by the owner
            component_type: Optionally filter by type of stack component
            name: Optionally filter flavors by name
            is_shared: Optionally filter out flavors by whether they are
                shared or not. NOTE(review): this parameter is currently
                accepted but never applied to the query below — confirm
                whether a `FlavorSchema.is_shared` filter is missing or the
                parameter should be removed.

        Returns:
            List of all the stack component flavors matching the given criteria
        """
        with Session(self.engine) as session:
            # Start from all flavors and narrow by each provided filter.
            query = select(FlavorSchema)
            if project_name_or_id:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                query = query.where(FlavorSchema.project_id == project.id)
            if component_type:
                query = query.where(FlavorSchema.type == component_type)
            if name:
                query = query.where(FlavorSchema.name == name)
            if user_name_or_id:
                user = self._get_user_schema(user_name_or_id, session=session)
                query = query.where(FlavorSchema.user_id == user.id)

            list_of_flavors_in_db = session.exec(query).all()

            return [flavor.to_model() for flavor in list_of_flavors_in_db]

    @track(AnalyticsEvent.DELETED_FLAVOR)
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Delete a flavor.

        Args:
            flavor_id: The id of the flavor to delete.

        Raises:
            KeyError: if the flavor doesn't exist.
            IllegalOperationError: if the flavor is used by a stack component.
        """
        with Session(self.engine) as session:
            try:
                flavor_in_db = session.exec(
                    select(FlavorSchema).where(FlavorSchema.id == flavor_id)
                ).one()
                # A flavor can only be deleted once no component uses it.
                components_of_flavor = session.exec(
                    select(StackComponentSchema).where(
                        StackComponentSchema.flavor == flavor_in_db.name
                    )
                ).all()
                if len(components_of_flavor) > 0:
                    # Fixed message: it's a flavor (not a "Stack Component"),
                    # the backtick around the type was unbalanced, and the
                    # missing space produced "used by3 components".
                    raise IllegalOperationError(
                        f"Flavor `{flavor_in_db.name}` of type "
                        f"`{flavor_in_db.type}` cannot be "
                        f"deleted as it is used by "
                        f"{len(components_of_flavor)} "
                        f"components. Before deleting this "
                        f"flavor, make sure to delete all "
                        f"associated components."
                    )
                else:
                    session.delete(flavor_in_db)
            except NoResultFound as error:
                # Include the offending ID so callers can tell which lookup
                # failed (the bare `KeyError` carried no context).
                raise KeyError(
                    f"Flavor with ID {flavor_id} not found."
                ) from error

            session.commit()

    # -----
    # Users
    # -----

    @property
    def active_user_name(self) -> str:
        """Gets the active username.

        Returns:
            The active username.
        """
        # This store always reports the default user account's name.
        active_name: str = self._default_user_name
        return active_name

    @track(AnalyticsEvent.CREATED_USER)
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Creates a new user.

        Args:
            user: User to be created.

        Returns:
            The newly created user.

        Raises:
            EntityExistsError: If a user with the given name already exists.
        """
        with Session(self.engine) as session:
            # Usernames are unique: refuse to create a duplicate.
            name_clash = session.exec(
                select(UserSchema).where(UserSchema.name == user.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create user with name '{user.name}': "
                    f"Found existing user with this name."
                )

            # Persist the new account and return it as a response model.
            user_schema = UserSchema.from_request(user)
            session.add(user_schema)
            session.commit()

            return user_schema.to_model()

    def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
        """Gets a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.
        """
        with Session(self.engine) as session:
            # Name-or-ID resolution is delegated to the shared lookup helper.
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            return user_schema.to_model()

    def get_auth_user(self, user_name_or_id: Union[str, UUID]) -> UserAuthModel:
        """Gets the auth model to a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.
        """
        with Session(self.engine) as session:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            # Unlike `get_user`, this builds the auth-specific model, which
            # also carries the password hash and activation token.
            return UserAuthModel(
                id=user_schema.id,
                name=user_schema.name,
                full_name=user_schema.full_name,
                email_opted_in=user_schema.email_opted_in,
                active=user_schema.active,
                created=user_schema.created,
                updated=user_schema.updated,
                password=user_schema.password,
                activation_token=user_schema.activation_token,
            )

    def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
        """List all users.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all users.
        """
        with Session(self.engine) as session:
            users_query = select(UserSchema)
            if name:
                users_query = users_query.where(UserSchema.name == name)
            # Results are returned sorted by username.
            return [
                schema.to_model()
                for schema in session.exec(
                    users_query.order_by(UserSchema.name)
                ).all()
            ]

    @track(AnalyticsEvent.UPDATED_USER)
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Updates an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The updated user.

        Raises:
            IllegalOperationError: If the request tries to update the username
                for the default user account.
        """
        with Session(self.engine) as session:
            user_schema = self._get_user_schema(user_id, session=session)

            # Renaming the default account is forbidden; a "rename" to the
            # same name is treated as a no-op and allowed.
            is_rename = (
                "name" in user_update.__fields_set__
                and user_update.name != user_schema.name
            )
            if user_schema.name == self._default_user_name and is_rename:
                raise IllegalOperationError(
                    "The username of the default user account cannot be "
                    "changed."
                )

            user_schema.update(user_update=user_update)
            session.add(user_schema)
            session.commit()

            # Reload so generated/updated columns are reflected in the model.
            session.refresh(user_schema)
            return user_schema.to_model()

    @track(AnalyticsEvent.DELETED_USER)
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Deletes a user.

        Args:
            user_name_or_id: The name or the ID of the user to delete.

        Raises:
            IllegalOperationError: If the user is the default user account.
        """
        with Session(self.engine) as session:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            # The default account must never be removed.
            if user_schema.name == self._default_user_name:
                raise IllegalOperationError(
                    "The default user account cannot be deleted."
                )
            session.delete(user_schema)
            session.commit()

    # -----
    # Teams
    # -----

    @track(AnalyticsEvent.CREATED_TEAM)
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Creates a new team.

        Args:
            team: The team model to create.

        Returns:
            The newly created team.

        Raises:
            EntityExistsError: If a team with the given name already exists.
        """
        with Session(self.engine) as session:
            # Team names are unique: refuse to create a duplicate.
            name_clash = session.exec(
                select(TeamSchema).where(TeamSchema.name == team.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create team with name '{team.name}': "
                    f"Found existing team with this name."
                )

            # Resolve all referenced users to their schemas in one query.
            members = []
            if team.users:
                id_filters = [
                    (UserSchema.id == user_id) for user_id in team.users
                ]
                members = session.exec(
                    select(UserSchema).where(or_(*id_filters))
                ).all()

            team_schema = TeamSchema(name=team.name, users=members)
            session.add(team_schema)
            session.commit()

            return team_schema.to_model()

    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Gets a specific team.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.
        """
        with Session(self.engine) as session:
            # Name-or-ID resolution is delegated to the shared lookup helper.
            team_schema = self._get_team_schema(
                team_name_or_id, session=session
            )
            return team_schema.to_model()

    def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
        """List all teams.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all teams.
        """
        with Session(self.engine) as session:
            teams_query = select(TeamSchema)
            if name:
                teams_query = teams_query.where(TeamSchema.name == name)
            # Results are returned sorted by team name.
            return [
                schema.to_model()
                for schema in session.exec(
                    teams_query.order_by(TeamSchema.name)
                ).all()
            ]

    @track(AnalyticsEvent.UPDATED_TEAM)
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Update an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The updated team.

        Raises:
            KeyError: if the team does not exist.
        """
        with Session(self.engine) as session:
            existing_team = session.exec(
                select(TeamSchema).where(TeamSchema.id == team_id)
            ).first()

            if existing_team is None:
                # Fixed message: the f-string fragments previously
                # concatenated to "Found noexisting teams".
                raise KeyError(
                    f"Unable to update team with id "
                    f"'{team_id}': Found no "
                    f"existing teams with this id."
                )

            # Update the team
            existing_team.update(team_update=team_update)

            # Only replace the membership when the update explicitly set the
            # `users` field. Previously the membership was unconditionally
            # wiped, so an update that didn't touch `users` emptied the team.
            if "users" in team_update.__fields_set__:
                existing_team.users = []
                if team_update.users:
                    for user in team_update.users:
                        existing_team.users.append(
                            self._get_user_schema(
                                user_name_or_id=user, session=session
                            )
                        )

            session.add(existing_team)
            session.commit()

            # Refresh the model that was just updated
            session.refresh(existing_team)
            return existing_team.to_model()

    @track(AnalyticsEvent.DELETED_TEAM)
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Deletes a team.

        Args:
            team_name_or_id: Name or ID of the team to delete.
        """
        with Session(self.engine) as session:
            # Resolve the team via the shared lookup helper, then remove it.
            team_schema = self._get_team_schema(
                team_name_or_id, session=session
            )
            session.delete(team_schema)
            session.commit()

    # -----
    # Roles
    # -----

    @track(AnalyticsEvent.CREATED_ROLE)
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Creates a new role.

        Args:
            role: The role model to create.

        Returns:
            The newly created role.

        Raises:
            EntityExistsError: If a role with the given name already exists.
        """
        with Session(self.engine) as session:
            # Role names are unique: refuse to create a duplicate.
            name_clash = session.exec(
                select(RoleSchema).where(RoleSchema.name == role.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create role '{role.name}': Role already exists."
                )

            # Persist the role first so it has an ID to attach the
            # permission rows to.
            role_schema = RoleSchema.from_request(role)
            session.add(role_schema)
            session.commit()

            # One permission row per requested permission.
            session.add_all(
                [
                    RolePermissionSchema(name=permission, role_id=role_schema.id)
                    for permission in role.permissions
                ]
            )
            session.commit()

            return role_schema.to_model()

    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Gets a specific role.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.
        """
        with Session(self.engine) as session:
            # Name-or-ID resolution is delegated to the shared lookup helper.
            role_schema = self._get_role_schema(
                role_name_or_id, session=session
            )
            return role_schema.to_model()

    def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
        """List all roles.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all roles.
        """
        with Session(self.engine) as session:
            statement = select(RoleSchema)
            if name:
                statement = statement.where(RoleSchema.name == name)
            # Return the roles sorted alphabetically by name.
            return [
                schema.to_model()
                for schema in session.exec(
                    statement.order_by(RoleSchema.name)
                ).all()
            ]

    @track(AnalyticsEvent.UPDATED_ROLE)
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Update an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The updated role.

        Raises:
            KeyError: if the role does not exist.
            IllegalOperationError: if the role is a system role.
        """
        with Session(self.engine) as session:
            existing_role = session.exec(
                select(RoleSchema).where(RoleSchema.id == role_id)
            ).first()

            if existing_role is None:
                # Fixed: the message previously rendered as
                # "Found noexisting roles" (missing space).
                raise KeyError(
                    f"Unable to update role with id "
                    f"'{role_id}': Found no "
                    f"existing roles with this id."
                )

            # The built-in admin/guest roles must never be modified.
            if existing_role.name in [DEFAULT_ADMIN_ROLE, DEFAULT_GUEST_ROLE]:
                raise IllegalOperationError(
                    f"The built-in role '{existing_role.name}' cannot be "
                    f"updated."
                )

            # The relationship table for roles behaves different from the other
            #  ones. As such the required updates on the permissions have to be
            #  done manually.
            if "permissions" in role_update.__fields_set__:
                existing_permissions = {
                    p.name for p in existing_role.permissions
                }

                # Permissions appearing on only one side need to be
                # added or removed accordingly.
                diff = existing_permissions.symmetric_difference(
                    role_update.permissions
                )

                for permission in diff:
                    if permission not in role_update.permissions:
                        # Permission was removed from the role.
                        permission_to_delete = session.exec(
                            select(RolePermissionSchema)
                            .where(RolePermissionSchema.name == permission)
                            .where(
                                RolePermissionSchema.role_id == existing_role.id
                            )
                        ).one_or_none()
                        session.delete(permission_to_delete)

                    elif permission not in existing_permissions:
                        # Permission was newly added to the role.
                        session.add(
                            RolePermissionSchema(
                                name=permission, role_id=existing_role.id
                            )
                        )

            # Update the role. (A redundant second commit was removed here.)
            existing_role.update(role_update=role_update)
            session.add(existing_role)
            session.commit()

            # Refresh the model that was just updated
            session.refresh(existing_role)
            return existing_role.to_model()

    @track(AnalyticsEvent.DELETED_ROLE)
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Deletes a role.

        Args:
            role_name_or_id: Name or ID of the role to delete.

        Raises:
            IllegalOperationError: If the role is still assigned to users or
                teams, or the role is one of the built-in roles.
        """
        with Session(self.engine) as session:
            role = self._get_role_schema(role_name_or_id, session=session)
            # The built-in admin/guest roles must always exist.
            if role.name in [DEFAULT_ADMIN_ROLE, DEFAULT_GUEST_ROLE]:
                raise IllegalOperationError(
                    f"The built-in role '{role.name}' cannot be deleted."
                )
            user_role = session.exec(
                select(UserRoleAssignmentSchema).where(
                    UserRoleAssignmentSchema.role_id == role.id
                )
            ).all()
            team_role = session.exec(
                select(TeamRoleAssignmentSchema).where(
                    TeamRoleAssignmentSchema.role_id == role.id
                )
            ).all()

            if len(user_role) > 0 or len(team_role) > 0:
                # Fixed garbled message ("Role `x` of type cannot be ...")
                # and dropped the incorrect "multiple" (a single assignment
                # also blocks deletion).
                raise IllegalOperationError(
                    f"Role `{role.name}` cannot be deleted as it is in use "
                    f"by one or more users or teams. Before deleting this "
                    f"role make sure to remove all instances where this "
                    f"role is used."
                )

            # Delete role
            session.delete(role)
            session.commit()

    # ----------------
    # Role assignments
    # ----------------

    def _list_user_role_assignments(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        role_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> List[RoleAssignmentResponseModel]:
        """List all user role assignments.

        Args:
            project_name_or_id: If provided, only return role assignments for
                this project.
            role_name_or_id: If provided, only list assignments of the given
                role.
            user_name_or_id: If provided, only list assignments for this user.

        Returns:
            A list of user role assignments.
        """
        with Session(self.engine) as session:
            statement = select(UserRoleAssignmentSchema)
            # Apply each optional filter; name/ID resolution happens through
            # the shared schema lookup helpers.
            if project_name_or_id is not None:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                statement = statement.where(
                    UserRoleAssignmentSchema.project_id == project.id
                )
            if role_name_or_id is not None:
                role = self._get_role_schema(role_name_or_id, session=session)
                statement = statement.where(
                    UserRoleAssignmentSchema.role_id == role.id
                )
            if user_name_or_id is not None:
                user = self._get_user_schema(user_name_or_id, session=session)
                statement = statement.where(
                    UserRoleAssignmentSchema.user_id == user.id
                )
            return [
                schema.to_model()
                for schema in session.exec(statement).all()
            ]

    def _list_team_role_assignments(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        team_name_or_id: Optional[Union[str, UUID]] = None,
        role_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> List[RoleAssignmentResponseModel]:
        """List all team role assignments.

        Args:
            project_name_or_id: If provided, only return role assignments for
                this project.
            team_name_or_id: If provided, only list assignments for this team.
            role_name_or_id: If provided, only list assignments of the given
                role.

        Returns:
            A list of team role assignments.
        """
        with Session(self.engine) as session:
            statement = select(TeamRoleAssignmentSchema)
            # Apply each optional filter; name/ID resolution happens through
            # the shared schema lookup helpers.
            if project_name_or_id is not None:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                statement = statement.where(
                    TeamRoleAssignmentSchema.project_id == project.id
                )
            if role_name_or_id is not None:
                role = self._get_role_schema(role_name_or_id, session=session)
                statement = statement.where(
                    TeamRoleAssignmentSchema.role_id == role.id
                )
            if team_name_or_id is not None:
                team = self._get_team_schema(team_name_or_id, session=session)
                statement = statement.where(
                    TeamRoleAssignmentSchema.team_id == team.id
                )
            return [
                schema.to_model()
                for schema in session.exec(statement).all()
            ]

    def list_role_assignments(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        role_name_or_id: Optional[Union[str, UUID]] = None,
        team_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> List[RoleAssignmentResponseModel]:
        """List all role assignments.

        Args:
            project_name_or_id: If provided, only return role assignments for
                this project.
            role_name_or_id: If provided, only list assignments of the given
                role.
            team_name_or_id: If provided, only list assignments for this team.
            user_name_or_id: If provided, only list assignments for this user.

        Returns:
            A list of all role assignments.
        """
        # Role assignments are stored in two tables (user-scoped and
        # team-scoped); query both and concatenate the results.
        assignments: List[RoleAssignmentResponseModel] = []
        assignments.extend(
            self._list_user_role_assignments(
                project_name_or_id=project_name_or_id,
                user_name_or_id=user_name_or_id,
                role_name_or_id=role_name_or_id,
            )
        )
        assignments.extend(
            self._list_team_role_assignments(
                project_name_or_id=project_name_or_id,
                team_name_or_id=team_name_or_id,
                role_name_or_id=role_name_or_id,
            )
        )
        return assignments

    def _assign_role_to_user(
        self,
        role_name_or_id: Union[str, UUID],
        user_name_or_id: Union[str, UUID],
        project_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> RoleAssignmentResponseModel:
        """Assigns a role to a user, potentially scoped to a specific project.

        Args:
            role_name_or_id: Name or ID of the role to assign.
            user_name_or_id: Name or ID of the user to which to assign the
                role.
            project_name_or_id: Optional ID of a project in which to assign
                the role. If this is not provided, the role will be assigned
                globally.

        Returns:
            A model of the role assignment.

        Raises:
            EntityExistsError: If the role assignment already exists.
        """
        with Session(self.engine) as session:
            role = self._get_role_schema(role_name_or_id, session=session)
            project: Optional[ProjectSchema] = None
            if project_name_or_id:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
            user = self._get_user_schema(user_name_or_id, session=session)

            # Refuse to create a duplicate of an existing assignment.
            # NOTE(review): when no project is given, this matches an
            # assignment in *any* project rather than only global ones —
            # confirm this duplicate check is intended.
            duplicate_query = select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.user_id == user.id,
                UserRoleAssignmentSchema.role_id == role.id,
            )
            if project is not None:
                duplicate_query = duplicate_query.where(
                    UserRoleAssignmentSchema.project_id == project.id
                )
            if session.exec(duplicate_query).first() is not None:
                raise EntityExistsError(
                    f"Unable to assign role '{role.name}' to user "
                    f"'{user.name}': Role already assigned in this project."
                )

            assignment = UserRoleAssignmentSchema(
                role_id=role.id,
                user_id=user.id,
                project_id=project.id if project else None,
                role=role,
                user=user,
                project=project,
            )
            session.add(assignment)
            session.commit()
            return assignment.to_model()

    def _assign_role_to_team(
        self,
        role_name_or_id: Union[str, UUID],
        team_name_or_id: Union[str, UUID],
        project_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> RoleAssignmentResponseModel:
        """Assigns a role to a team, potentially scoped to a specific project.

        Args:
            role_name_or_id: Name or ID of the role to assign.
            team_name_or_id: Name or ID of the team to which to assign the
                role.
            project_name_or_id: Optional ID of a project in which to assign
                the role. If this is not provided, the role will be assigned
                globally.

        Returns:
            A model of the role assignment.

        Raises:
            EntityExistsError: If the role assignment already exists.
        """
        with Session(self.engine) as session:
            role = self._get_role_schema(role_name_or_id, session=session)
            project: Optional[ProjectSchema] = None
            if project_name_or_id:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
            team = self._get_team_schema(team_name_or_id, session=session)

            # Refuse to create a duplicate of an existing assignment.
            # NOTE(review): when no project is given, this matches an
            # assignment in *any* project rather than only global ones —
            # confirm this duplicate check is intended.
            duplicate_query = select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.team_id == team.id,
                TeamRoleAssignmentSchema.role_id == role.id,
            )
            if project is not None:
                duplicate_query = duplicate_query.where(
                    TeamRoleAssignmentSchema.project_id == project.id
                )
            if session.exec(duplicate_query).first() is not None:
                raise EntityExistsError(
                    f"Unable to assign role '{role.name}' to team "
                    f"'{team.name}': Role already assigned in this project."
                )

            assignment = TeamRoleAssignmentSchema(
                role_id=role.id,
                team_id=team.id,
                project_id=project.id if project else None,
                role=role,
                team=team,
                project=project,
            )
            session.add(assignment)
            session.commit()

            return assignment.to_model()

    def create_role_assignment(
        self, role_assignment: RoleAssignmentRequestModel
    ) -> RoleAssignmentResponseModel:
        """Assigns a role to a user or team, scoped to a specific project.

        Args:
            role_assignment: The role assignment to create.

        Returns:
            The created role assignment.

        Raises:
            ValueError: If neither a user nor a team is specified.
        """
        # An assignment must target exactly one of: a user or a team.
        # A user target takes precedence if both are somehow set.
        if role_assignment.user:
            return self._assign_role_to_user(
                role_name_or_id=role_assignment.role,
                user_name_or_id=role_assignment.user,
                project_name_or_id=role_assignment.project,
            )
        elif role_assignment.team:
            return self._assign_role_to_team(
                role_name_or_id=role_assignment.role,
                team_name_or_id=role_assignment.team,
                project_name_or_id=role_assignment.project,
            )
        else:
            raise ValueError(
                "Role assignment must be assigned to either a user or a team."
            )

    def get_role_assignment(
        self, role_assignment_id: UUID
    ) -> RoleAssignmentResponseModel:
        """Gets a role assignment by ID.

        Args:
            role_assignment_id: ID of the role assignment to get.

        Returns:
            The role assignment.

        Raises:
            KeyError: If the role assignment does not exist.
        """
        with Session(self.engine) as session:
            # The ID may belong to either a user-scoped or a team-scoped
            # assignment; check the user table first, then the team table.
            for assignment_schema in (
                UserRoleAssignmentSchema,
                TeamRoleAssignmentSchema,
            ):
                assignment = session.exec(
                    select(assignment_schema).where(
                        assignment_schema.id == role_assignment_id
                    )
                ).one_or_none()
                if assignment:
                    return assignment.to_model()

            raise KeyError(
                f"RoleAssignment with ID {role_assignment_id} not found."
            )

    def delete_role_assignment(self, role_assignment_id: UUID) -> None:
        """Delete a specific role assignment.

        Args:
            role_assignment_id: The ID of the specific role assignment.

        Raises:
            KeyError: If the role assignment does not exist.
        """
        with Session(self.engine) as session:
            # The ID may refer to a user-scoped or a team-scoped assignment;
            # check both tables and delete whatever matches.
            found = False
            for assignment_schema in (
                UserRoleAssignmentSchema,
                TeamRoleAssignmentSchema,
            ):
                assignment = session.exec(
                    select(assignment_schema).where(
                        assignment_schema.id == role_assignment_id
                    )
                ).one_or_none()
                if assignment:
                    session.delete(assignment)
                    found = True

            if not found:
                raise KeyError(
                    f"RoleAssignment with ID {role_assignment_id} not found."
                )
            session.commit()

    # --------
    # Projects
    # --------

    @track(AnalyticsEvent.CREATED_PROJECT)
    def create_project(
        self, project: ProjectRequestModel
    ) -> ProjectResponseModel:
        """Creates a new project.

        Args:
            project: The project to create.

        Returns:
            The newly created project.

        Raises:
            EntityExistsError: If a project with the given name already exists.
        """
        with Session(self.engine) as session:
            # Project names are unique across the store.
            name_clash = session.exec(
                select(ProjectSchema).where(ProjectSchema.name == project.name)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create project {project.name}: "
                    "A project with this name already exists."
                )

            # Persist the new project
            schema = ProjectSchema.from_request(project)
            session.add(schema)
            session.commit()

            # Re-load generated fields before converting to a model
            session.refresh(schema)

            return schema.to_model()

    def get_project(
        self, project_name_or_id: Union[str, UUID]
    ) -> ProjectResponseModel:
        """Get an existing project by name or ID.

        Args:
            project_name_or_id: Name or ID of the project to get.

        Returns:
            The requested project if one was found.
        """
        with Session(self.engine) as session:
            schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            return schema.to_model()

    def list_projects(
        self, name: Optional[str] = None
    ) -> List[ProjectResponseModel]:
        """List all projects.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all projects.
        """
        with Session(self.engine) as session:
            statement = select(ProjectSchema)
            if name:
                statement = statement.where(ProjectSchema.name == name)
            # Return the projects sorted alphabetically by name.
            return [
                schema.to_model()
                for schema in session.exec(
                    statement.order_by(ProjectSchema.name)
                ).all()
            ]

    @track(AnalyticsEvent.UPDATED_PROJECT)
    def update_project(
        self, project_id: UUID, project_update: ProjectUpdateModel
    ) -> ProjectResponseModel:
        """Update an existing project.

        Args:
            project_id: The ID of the project to be updated.
            project_update: The update to be applied to the project.

        Returns:
            The updated project.

        Raises:
            IllegalOperationError: if the project is the default project.
            KeyError: if the project does not exist.
        """
        with Session(self.engine) as session:
            existing_project = session.exec(
                select(ProjectSchema).where(ProjectSchema.id == project_id)
            ).first()
            if existing_project is None:
                # Fixed: the message previously rendered as
                # "Found noexisting projects" (missing space).
                raise KeyError(
                    f"Unable to update project with id "
                    f"'{project_id}': Found no "
                    f"existing projects with this id."
                )
            # The default project's name is fixed; only reject the update
            # when the name field is explicitly being changed.
            if (
                existing_project.name == self._default_project_name
                and "name" in project_update.__fields_set__
                and project_update.name != existing_project.name
            ):
                raise IllegalOperationError(
                    "The name of the default project cannot be changed."
                )

            # Update the project
            existing_project.update(project_update=project_update)
            session.add(existing_project)
            session.commit()

            # Refresh the model that was just updated
            session.refresh(existing_project)
            return existing_project.to_model()

    @track(AnalyticsEvent.DELETED_PROJECT)
    def delete_project(self, project_name_or_id: Union[str, UUID]) -> None:
        """Deletes a project.

        Args:
            project_name_or_id: Name or ID of the project to delete.

        Raises:
            IllegalOperationError: If the project is the default project.
        """
        with Session(self.engine) as session:
            # Resolve the project (raises if it does not exist).
            schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            # The default project must always exist.
            if schema.name == self._default_project_name:
                raise IllegalOperationError(
                    "The default project cannot be deleted."
                )

            session.delete(schema)
            session.commit()

    # ---------
    # Pipelines
    # ---------

    @track(AnalyticsEvent.CREATE_PIPELINE)
    def create_pipeline(
        self,
        pipeline: PipelineRequestModel,
    ) -> PipelineResponseModel:
        """Creates a new pipeline in a project.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The newly created pipeline.

        Raises:
            EntityExistsError: If an identical pipeline already exists.
        """
        with Session(self.engine) as session:
            # Pipeline names must be unique within a project.
            name_clash = session.exec(
                select(PipelineSchema)
                .where(PipelineSchema.name == pipeline.name)
                .where(PipelineSchema.project_id == pipeline.project)
            ).first()
            if name_clash is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline in project "
                    f"'{pipeline.project}': A pipeline with this name "
                    f"already exists."
                )

            # Persist the new pipeline. The spec is serialized with sorted
            # keys so equal specs produce identical stored strings.
            schema = PipelineSchema(
                name=pipeline.name,
                project_id=pipeline.project,
                user_id=pipeline.user,
                docstring=pipeline.docstring,
                spec=pipeline.spec.json(sort_keys=True),
            )
            session.add(schema)
            session.commit()

            # Re-load generated fields before converting to a model
            session.refresh(schema)

            return schema.to_model()

    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Get a pipeline with a given ID.

        Args:
            pipeline_id: ID of the pipeline.

        Returns:
            The pipeline.

        Raises:
            KeyError: if the pipeline does not exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to get pipeline with ID '{pipeline_id}': "
                    "No pipeline with this ID found."
                )

            return schema.to_model()

    def list_pipelines(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        name: Optional[str] = None,
    ) -> List[PipelineResponseModel]:
        """List all pipelines in the project.

        Args:
            project_name_or_id: If provided, only list pipelines in this
                project.
            user_name_or_id: If provided, only list pipelines from this user.
            name: If provided, only list pipelines with this name.

        Returns:
            A list of pipelines.
        """
        with Session(self.engine) as session:
            statement = select(PipelineSchema)

            # Narrow the result set by each provided filter.
            if project_name_or_id is not None:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                statement = statement.where(
                    PipelineSchema.project_id == project.id
                )
            if user_name_or_id is not None:
                user = self._get_user_schema(user_name_or_id, session=session)
                statement = statement.where(PipelineSchema.user_id == user.id)
            if name:
                statement = statement.where(PipelineSchema.name == name)

            return [
                schema.to_model()
                for schema in session.exec(statement).all()
            ]

    @track(AnalyticsEvent.UPDATE_PIPELINE)
    def update_pipeline(
        self,
        pipeline_id: UUID,
        pipeline_update: PipelineUpdateModel,
    ) -> PipelineResponseModel:
        """Updates a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The updated pipeline.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to update pipeline with ID {pipeline_id}: "
                    f"No pipeline with this ID found."
                )

            # Apply the update in place and persist it.
            schema.update(pipeline_update)
            session.add(schema)
            session.commit()

            return schema.to_model()

    @track(AnalyticsEvent.DELETE_PIPELINE)
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Deletes a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to delete.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """
        with Session(self.engine) as session:
            schema = session.exec(
                select(PipelineSchema).where(PipelineSchema.id == pipeline_id)
            ).first()
            if schema is None:
                raise KeyError(
                    f"Unable to delete pipeline with ID {pipeline_id}: "
                    f"No pipeline with this ID found."
                )

            session.delete(schema)
            session.commit()

    # --------------
    # Pipeline runs
    # --------------

    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Creates a pipeline run.

        The run name, client-supplied ID and (optional) MLMD ID must each be
        unique across all stored runs. Missing stack or pipeline references
        are tolerated: the run is created without the link and a warning is
        logged instead of raising.

        Args:
            pipeline_run: The pipeline run to create.

        Returns:
            The created pipeline run.

        Raises:
            EntityExistsError: If an identical pipeline run already exists.
        """
        with Session(self.engine) as session:

            # Check if pipeline run with same name already exists.
            existing_domain_run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.name == pipeline_run.name
                )
            ).first()
            if existing_domain_run is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline run: A pipeline run with name "
                    f"'{pipeline_run.name}' already exists."
                )

            # Check if pipeline run with same ID already exists. The ID is
            # supplied by the client, so it cannot be assumed fresh.
            existing_id_run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.id == pipeline_run.id
                )
            ).first()
            if existing_id_run is not None:
                raise EntityExistsError(
                    f"Unable to create pipeline run: A pipeline run with ID "
                    f"'{pipeline_run.id}' already exists."
                )

            # Check if pipeline run with same name MLMD ID already exists.
            # NOTE(review): these three check-then-insert lookups are not
            # atomic; a concurrent writer could still create a duplicate
            # between check and commit — confirm this is acceptable here.
            if pipeline_run.mlmd_id is not None:
                existing_mlmd_id_run = session.exec(
                    select(PipelineRunSchema).where(
                        PipelineRunSchema.mlmd_id == pipeline_run.mlmd_id
                    )
                ).first()
                if existing_mlmd_id_run is not None:
                    raise EntityExistsError(
                        f"Unable to create pipeline run: A pipeline run with "
                        f"MLMD ID '{pipeline_run.mlmd_id}' already exists."
                    )

            # Query stack to ensure it exists in the DB. A missing stack
            # only downgrades to an unlinked run, not an error.
            stack_id: Optional[UUID] = None
            if pipeline_run.stack is not None:
                stack_id = session.exec(
                    select(StackSchema.id).where(
                        StackSchema.id == pipeline_run.stack
                    )
                ).first()
                if stack_id is None:
                    logger.warning(
                        f"No stack found for this run. "
                        f"Creating pipeline run '{pipeline_run.name}' without "
                        "linked stack."
                    )

            # Query pipeline to ensure it exists in the DB. A missing
            # pipeline makes this an "unlisted" run rather than an error.
            pipeline_id: Optional[UUID] = None
            if pipeline_run.pipeline is not None:
                pipeline_id = session.exec(
                    select(PipelineSchema.id).where(
                        PipelineSchema.id == pipeline_run.pipeline
                    )
                ).first()
                if pipeline_id is None:
                    logger.warning(
                        f"No pipeline found. Creating pipeline run "
                        f"'{pipeline_run.name}' as unlisted run."
                    )

            # The run configuration is stored as a JSON string.
            configuration = json.dumps(pipeline_run.pipeline_configuration)

            new_run = PipelineRunSchema(
                id=pipeline_run.id,
                name=pipeline_run.name,
                orchestrator_run_id=pipeline_run.orchestrator_run_id,
                stack_id=stack_id,
                project_id=pipeline_run.project,
                user_id=pipeline_run.user,
                pipeline_id=pipeline_id,
                status=pipeline_run.status,
                pipeline_configuration=configuration,
                num_steps=pipeline_run.num_steps,
                git_sha=pipeline_run.git_sha,
                zenml_version=pipeline_run.zenml_version,
                mlmd_id=pipeline_run.mlmd_id,
            )

            # Create the pipeline run
            session.add(new_run)
            session.commit()

            return new_run.to_model()

    def get_run(
        self, run_name_or_id: Union[str, UUID]
    ) -> PipelineRunResponseModel:
        """Fetch a single pipeline run by its name or ID.

        Args:
            run_name_or_id: The name or ID of the pipeline run to get.

        Returns:
            The pipeline run.
        """
        # Pull in any new runs from MLMD first, unless we are running inside
        # the server (where syncing is handled elsewhere).
        if not self.runs_inside_server:
            self._sync_runs()
        with Session(self.engine) as session:
            run_schema = self._get_run_schema(run_name_or_id, session=session)
            return run_schema.to_model()

    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Gets or creates a pipeline run.

        If a run with the same ID or name already exists, it is returned.
        Otherwise, a new run is created.

        Args:
            pipeline_run: The pipeline run to get or create.

        Returns:
            The pipeline run.
        """
        # Attempt the creation first: issuing the 'create' before any lookup
        # narrows the window for concurrency issues.
        try:
            return self.create_run(pipeline_run)
        except EntityExistsError:
            pass

        # An `EntityExistsError` can be caused by a clash on either the run
        # ID or the run name, so fall back from an ID lookup to a name lookup
        # if the former fails.
        try:
            return self.get_run(pipeline_run.id)
        except KeyError:
            return self.get_run(pipeline_run.name)

    def list_runs(
        self,
        name: Optional[str] = None,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        stack_id: Optional[UUID] = None,
        component_id: Optional[UUID] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        pipeline_id: Optional[UUID] = None,
        unlisted: bool = False,
    ) -> List[PipelineRunResponseModel]:
        """Gets all pipeline runs matching the given filters.

        Args:
            name: If provided, only return runs with this name.
            project_name_or_id: If provided, only return runs for this project.
            stack_id: If provided, only return runs for this stack.
            component_id: If provided, only return runs whose stack contains
                this component.
            user_name_or_id: If provided, only return runs for this user.
            pipeline_id: If provided, only return runs for this pipeline.
            unlisted: If True, only return unlisted runs that are not
                associated with any pipeline (filter by pipeline_id==None).

        Returns:
            A list of all pipeline runs.
        """
        # Make sure the DB reflects the latest MLMD state before querying.
        if not self.runs_inside_server:
            self._sync_runs()
        with Session(self.engine) as session:
            # Collect all filter conditions, then apply them in one pass.
            conditions = []
            if project_name_or_id is not None:
                project = self._get_project_schema(
                    project_name_or_id, session=session
                )
                conditions.append(PipelineRunSchema.project_id == project.id)
            if stack_id is not None:
                conditions.append(PipelineRunSchema.stack_id == stack_id)
            if component_id:
                # Match runs via the stack composition link table so we can
                # filter by a component of the stack the run used.
                conditions.append(
                    StackCompositionSchema.stack_id
                    == PipelineRunSchema.stack_id
                )
                conditions.append(
                    StackCompositionSchema.component_id == component_id
                )
            if name is not None:
                conditions.append(PipelineRunSchema.name == name)
            if pipeline_id is not None:
                conditions.append(PipelineRunSchema.pipeline_id == pipeline_id)
            elif unlisted:
                conditions.append(is_(PipelineRunSchema.pipeline_id, None))
            if user_name_or_id is not None:
                user = self._get_user_schema(user_name_or_id, session=session)
                conditions.append(PipelineRunSchema.user_id == user.id)

            query = select(PipelineRunSchema)
            for condition in conditions:
                query = query.where(condition)
            query = query.order_by(PipelineRunSchema.created)
            return [run.to_model() for run in session.exec(query).all()]

    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Updates a pipeline run.

        Args:
            run_id: The ID of the pipeline run to update.
            run_update: The update to be applied to the pipeline run.

        Returns:
            The updated pipeline run.

        Raises:
            KeyError: if the pipeline run doesn't exist.
        """
        with Session(self.engine) as session:
            # The run must already exist in order to be updated.
            run = session.exec(
                select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
            ).first()
            if run is None:
                raise KeyError(
                    f"Unable to update pipeline run with ID {run_id}: "
                    f"No pipeline run with this ID found."
                )

            # Apply the update and persist it.
            run.update(run_update=run_update)
            session.add(run)
            session.commit()

            # Refresh so DB-generated values are reflected in the model.
            session.refresh(run)
            return run.to_model()

    # ------------------
    # Pipeline run steps
    # ------------------

    def create_run_step(
        self, step: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Creates a step.

        The step schema is committed first; its parent-step links and input
        artifact links are then persisted through separate helper calls
        (each using its own session), so the step row must exist before the
        link rows are written.

        Args:
            step: The step to create.

        Returns:
            The created step.

        Raises:
            EntityExistsError: if the step already exists.
            KeyError: if the pipeline run doesn't exist.
        """
        with Session(self.engine) as session:

            # Check if the step already exists
            # (only possible to detect when the request carries an MLMD ID).
            if step.mlmd_id is not None:
                existing_step = session.exec(
                    select(StepRunSchema).where(
                        StepRunSchema.mlmd_id == step.mlmd_id
                    )
                ).first()
                if existing_step is not None:
                    raise EntityExistsError(
                        f"Unable to create step '{step.name}': A step with "
                        f"MLMD ID '{step.mlmd_id}' already exists."
                    )

            # Check if the pipeline run exists
            run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.id == step.pipeline_run_id
                )
            ).first()
            if run is None:
                raise KeyError(
                    f"Unable to create step '{step.name}': No pipeline run "
                    f"with ID '{step.pipeline_run_id}' found."
                )

            # Check if the step name already exists in the pipeline run
            # (step names are unique per run, not globally).
            existing_step = session.exec(
                select(StepRunSchema)
                .where(StepRunSchema.name == step.name)
                .where(StepRunSchema.pipeline_run_id == step.pipeline_run_id)
            ).first()
            if existing_step is not None:
                raise EntityExistsError(
                    f"Unable to create step '{step.name}': A step with this "
                    f"name already exists in the pipeline run with ID "
                    f"'{step.pipeline_run_id}'."
                )

            # Create the step
            step_schema = StepRunSchema.from_request(step)
            session.add(step_schema)
            session.commit()

            # Save parent step IDs into the database.
            # Done after the commit so the child row exists for the FK links.
            for parent_step_id in step.parent_step_ids:
                self._set_run_step_parent_step(
                    child_id=step_schema.id, parent_id=parent_step_id
                )

            # Save input artifact IDs into the database.
            for input_name, artifact_id in step.input_artifacts.items():
                self._set_run_step_input_artifact(
                    step_id=step_schema.id,
                    artifact_id=artifact_id,
                    name=input_name,
                )

            # The relations were just written, so pass them through from the
            # request instead of re-querying them.
            return step_schema.to_model(
                parent_step_ids=step.parent_step_ids,
                mlmd_parent_step_ids=step.mlmd_parent_step_ids,
                input_artifacts=step.input_artifacts,
            )

    def _set_run_step_parent_step(
        self, child_id: UUID, parent_id: UUID
    ) -> None:
        """Records a parent-child relationship between two steps.

        Idempotent: if the relationship is already stored, nothing happens.

        Args:
            child_id: The ID of the child step to set the parent for.
            parent_id: The ID of the parent step to set a child for.

        Raises:
            KeyError: if the child step or parent step doesn't exist.
        """
        with Session(self.engine) as session:

            # Both ends of the relationship must exist in the database.
            child = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == child_id)
            ).first()
            if child is None:
                raise KeyError(
                    f"Unable to set parent step for step with ID "
                    f"{child_id}: No step with this ID found."
                )

            parent = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == parent_id)
            ).first()
            if parent is None:
                raise KeyError(
                    f"Unable to set parent step for step with ID "
                    f"{child_id}: No parent step with ID {parent_id} "
                    "found."
                )

            # Nothing to do if the relationship is already recorded.
            existing_link = session.exec(
                select(StepRunParentsSchema)
                .where(StepRunParentsSchema.child_id == child_id)
                .where(StepRunParentsSchema.parent_id == parent_id)
            ).first()
            if existing_link is None:
                session.add(
                    StepRunParentsSchema(
                        child_id=child_id, parent_id=parent_id
                    )
                )
                session.commit()

    def _set_run_step_input_artifact(
        self, step_id: UUID, artifact_id: UUID, name: str
    ) -> None:
        """Sets an artifact as an input of a step.

        Idempotent: if the input link is already stored, nothing happens.

        Args:
            step_id: The ID of the step.
            artifact_id: The ID of the artifact.
            name: The name of the input in the step.

        Raises:
            KeyError: if the step or artifact doesn't exist.
        """
        with Session(self.engine) as session:

            # Both the step and the artifact must exist in the database.
            step = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_id)
            ).first()
            if step is None:
                raise KeyError(
                    f"Unable to set input artifact: No step with ID "
                    f"'{step_id}' found."
                )

            artifact = session.exec(
                select(ArtifactSchema).where(ArtifactSchema.id == artifact_id)
            ).first()
            if artifact is None:
                raise KeyError(
                    f"Unable to set input artifact: No artifact with ID "
                    f"'{artifact_id}' found."
                )

            # Nothing to do if the input link is already recorded.
            existing_link = session.exec(
                select(StepRunInputArtifactSchema)
                .where(StepRunInputArtifactSchema.step_id == step_id)
                .where(StepRunInputArtifactSchema.artifact_id == artifact_id)
            ).first()
            if existing_link is None:
                session.add(
                    StepRunInputArtifactSchema(
                        step_id=step_id, artifact_id=artifact_id, name=name
                    )
                )
                session.commit()

    def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
        """Get a step by ID.

        Args:
            step_id: The ID of the step to get.

        Returns:
            The step.

        Raises:
            KeyError: if the step doesn't exist.
        """
        # Make sure the DB reflects the latest MLMD state before querying.
        if not self.runs_inside_server:
            self._sync_runs()
        with Session(self.engine) as session:
            step_schema = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_id)
            ).first()
            if step_schema is None:
                raise KeyError(
                    f"Unable to get step with ID {step_id}: No step with this "
                    "ID found."
                )
            # Enrich the schema with its relations and convert to a model.
            return self._run_step_schema_to_model(step_schema)

    def _run_step_schema_to_model(
        self, step: StepRunSchema
    ) -> StepRunResponseModel:
        """Converts a run step schema to a step model.

        Resolves the step's parent steps and input artifacts from the
        database before building the model.

        Args:
            step: The run step schema to convert.

        Returns:
            The run step model.
        """
        with Session(self.engine) as session:
            # Resolve the step's parents via the parent-child link table.
            parents = session.exec(
                select(StepRunSchema)
                .where(StepRunParentsSchema.child_id == step.id)
                .where(StepRunParentsSchema.parent_id == StepRunSchema.id)
            ).all()

            # Resolve input artifact IDs, keyed by the step's input name.
            input_artifacts = {
                input_name: artifact_id
                for artifact_id, input_name in session.exec(
                    select(
                        StepRunInputArtifactSchema.artifact_id,
                        StepRunInputArtifactSchema.name,
                    ).where(StepRunInputArtifactSchema.step_id == step.id)
                ).all()
            }

            # Convert to model.
            return step.to_model(
                parent_step_ids=[parent.id for parent in parents],
                mlmd_parent_step_ids=[
                    parent.mlmd_id
                    for parent in parents
                    if parent.mlmd_id is not None
                ],
                input_artifacts=input_artifacts,
            )

    def list_run_steps(
        self, run_id: Optional[UUID] = None
    ) -> List[StepRunResponseModel]:
        """Get all run steps.

        Args:
            run_id: If provided, only return steps for this pipeline run.

        Returns:
            A list of all run steps.
        """
        # Make sure the DB reflects the latest MLMD state before querying.
        if not self.runs_inside_server:
            self._sync_runs()
        with Session(self.engine) as session:
            query = select(StepRunSchema)
            if run_id is not None:
                query = query.where(StepRunSchema.pipeline_run_id == run_id)
            return [
                self._run_step_schema_to_model(step)
                for step in session.exec(query).all()
            ]

    def update_run_step(
        self,
        step_id: UUID,
        step_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Updates a step.

        Args:
            step_id: The ID of the step to update.
            step_update: The update to be applied to the step.

        Returns:
            The updated step.

        Raises:
            KeyError: if the step doesn't exist.
        """
        with Session(self.engine) as session:
            # The step must already exist in order to be updated.
            step_schema = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_id)
            ).first()
            if step_schema is None:
                raise KeyError(
                    f"Unable to update step with ID {step_id}: "
                    f"No step with this ID found."
                )

            # Apply the update and persist it.
            step_schema.update(step_update)
            session.add(step_schema)
            session.commit()

            # Refresh so DB-generated values are reflected in the model.
            session.refresh(step_schema)
            return self._run_step_schema_to_model(step_schema)

    def get_run_step_inputs(
        self, step_id: UUID
    ) -> Dict[str, ArtifactResponseModel]:
        """Get the inputs for a specific step.

        Args:
            step_id: The id of the step to get inputs for.

        Returns:
            A dict mapping artifact names to the input artifacts for the step.

        Raises:
            KeyError: if the step doesn't exist.
        """
        with Session(self.engine) as session:
            # The step must exist before its inputs can be queried.
            step = session.exec(
                select(StepRunSchema).where(StepRunSchema.id == step_id)
            ).first()
            if step is None:
                raise KeyError(
                    f"Unable to get input artifacts for step with ID "
                    f"{step_id}: No step with this ID found."
                )

            # Join artifacts with their input links to recover the names
            # under which each artifact enters the step.
            rows = session.exec(
                select(ArtifactSchema, StepRunInputArtifactSchema)
                .where(
                    ArtifactSchema.id == StepRunInputArtifactSchema.artifact_id
                )
                .where(StepRunInputArtifactSchema.step_id == step_id)
            ).all()

            inputs: Dict[str, ArtifactResponseModel] = {}
            for artifact, input_link in rows:
                inputs[input_link.name] = artifact.to_model()
            return inputs

    # ---------
    # Artifacts
    # ---------

    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Creates an artifact.

        Args:
            artifact: The artifact to create.

        Returns:
            The created artifact.

        Raises:
            KeyError: if the parent step doesn't exist.
            EntityExistsError: if the artifact already exists.
        """
        with Session(self.engine) as session:
            # The parent step must already exist in the database.
            parent_step = session.exec(
                select(StepRunSchema).where(
                    StepRunSchema.id == artifact.parent_step_id
                )
            ).first()
            if parent_step is None:
                raise KeyError(
                    f"Unable to create artifact: Could not find parent step "
                    f"with ID '{artifact.parent_step_id}'."
                )

            # Guard against duplicates: the same MLMD artifact may only be
            # registered once per parent step.
            if artifact.mlmd_id is not None:
                duplicate = session.exec(
                    select(ArtifactSchema)
                    .where(ArtifactSchema.mlmd_id == artifact.mlmd_id)
                    .where(
                        ArtifactSchema.mlmd_parent_step_id
                        == artifact.mlmd_parent_step_id
                    )
                ).first()
                if duplicate is not None:
                    raise EntityExistsError(
                        f"Unable to create artifact: An artifact with MLMD ID "
                        f"'{artifact.mlmd_id}' already exists as output of "
                        f"step '{artifact.mlmd_parent_step_id}'."
                    )

            # Persist the new artifact.
            artifact_schema = ArtifactSchema.from_request(artifact)
            session.add(artifact_schema)
            session.commit()
            return artifact_schema.to_model()

    def list_artifacts(
        self,
        artifact_uri: Optional[str] = None,
        parent_step_id: Optional[UUID] = None,
    ) -> List[ArtifactResponseModel]:
        """Lists all artifacts.

        Args:
            artifact_uri: If specified, only artifacts with the given URI will
                be returned.
            parent_step_id: If specified, only artifacts for the given step run
                will be returned.

        Returns:
            A list of all artifacts.
        """
        # Make sure the DB reflects the latest MLMD state before querying.
        if not self.runs_inside_server:
            self._sync_runs()
        with Session(self.engine) as session:
            query = select(ArtifactSchema)
            if artifact_uri is not None:
                query = query.where(ArtifactSchema.uri == artifact_uri)
            if parent_step_id is not None:
                query = query.where(
                    ArtifactSchema.parent_step_id == parent_step_id
                )
            return [
                artifact.to_model()
                for artifact in session.exec(query).all()
            ]

    # =======================
    # Internal helper methods
    # =======================
    @staticmethod
    def _get_schema_by_name_or_id(
        object_name_or_id: Union[str, UUID],
        schema_class: Type[AnyNamedSchema],
        schema_name: str,
        session: Session,
    ) -> AnyNamedSchema:
        """Query a schema by its 'name' or 'id' field.

        Args:
            object_name_or_id: The name or ID of the object to query.
            schema_class: The schema class to query. E.g., `ProjectSchema`.
            schema_name: The name of the schema used for error messages.
                E.g., "project".
            session: The database session to use.

        Returns:
            The schema object.

        Raises:
            KeyError: if the object couldn't be found.
            ValueError: if no object name or ID is provided.
        """
        if object_name_or_id is None:
            raise ValueError(
                f"Unable to get {schema_name}: No {schema_name} ID or name "
                "provided."
            )
        # Treat the input as an ID when it parses as a UUID, otherwise fall
        # back to a lookup by name.
        if uuid_utils.is_valid_uuid(object_name_or_id):
            filter_params = schema_class.id == object_name_or_id
            error_msg = (
                f"Unable to get {schema_name} with name or ID "
                f"'{object_name_or_id}': No {schema_name} with this ID found."
            )
        else:
            filter_params = schema_class.name == object_name_or_id
            error_msg = (
                f"Unable to get {schema_name} with name or ID "
                f"'{object_name_or_id}': '{object_name_or_id}' is not a valid "
                f"UUID and no {schema_name} with this name exists."
            )

        schema = session.exec(select(schema_class).where(filter_params)).first()

        if schema is None:
            raise KeyError(error_msg)
        return schema

    def _get_project_schema(
        self,
        project_name_or_id: Union[str, UUID],
        session: Session,
    ) -> ProjectSchema:
        """Resolves a project schema by name or ID.

        Helper used wherever another object needs its associated project.

        Args:
            project_name_or_id: The name or ID of the project to get.
            session: The database session to use.

        Returns:
            The project schema.
        """
        return self._get_schema_by_name_or_id(
            project_name_or_id, ProjectSchema, "project", session
        )

    def _get_user_schema(
        self,
        user_name_or_id: Union[str, UUID],
        session: Session,
    ) -> UserSchema:
        """Resolves a user schema by name or ID.

        Helper used wherever another object needs its associated user.

        Args:
            user_name_or_id: The name or ID of the user to get.
            session: The database session to use.

        Returns:
            The user schema.
        """
        return self._get_schema_by_name_or_id(
            user_name_or_id, UserSchema, "user", session
        )

    def _get_team_schema(
        self,
        team_name_or_id: Union[str, UUID],
        session: Session,
    ) -> TeamSchema:
        """Resolves a team schema by name or ID.

        Args:
            team_name_or_id: The name or ID of the team to get.
            session: The database session to use.

        Returns:
            The team schema.
        """
        return self._get_schema_by_name_or_id(
            team_name_or_id, TeamSchema, "team", session
        )

    def _get_role_schema(
        self,
        role_name_or_id: Union[str, UUID],
        session: Session,
    ) -> RoleSchema:
        """Resolves a role schema by name or ID.

        Args:
            role_name_or_id: The name or ID of the role to get.
            session: The database session to use.

        Returns:
            The role schema.
        """
        return self._get_schema_by_name_or_id(
            role_name_or_id, RoleSchema, "role", session
        )

    def _get_run_schema(
        self,
        run_name_or_id: Union[str, UUID],
        session: Session,
    ) -> PipelineRunSchema:
        """Resolves a pipeline run schema by name or ID.

        Args:
            run_name_or_id: The name or ID of the run to get.
            session: The database session to use.

        Returns:
            The run schema.
        """
        return self._get_schema_by_name_or_id(
            run_name_or_id, PipelineRunSchema, "run", session
        )

    # MLMD Stuff

    def _resolve_mlmd_step_id(self, mlmd_id: int) -> UUID:
        """Resolves a step ID from MLMD to a ZenML step ID.

        Args:
            mlmd_id: The MLMD ID of the step.

        Returns:
            The ZenML step ID.

        Raises:
            KeyError: if the step couldn't be found.
        """
        with Session(self.engine) as session:
            step_schema = session.exec(
                select(StepRunSchema).where(StepRunSchema.mlmd_id == mlmd_id)
            ).first()
            if step_schema is None:
                raise KeyError(
                    f"Unable to resolve MLMD step ID {mlmd_id}: "
                    f"No step with this ID found."
                )
            return step_schema.id

    def _resolve_mlmd_artifact_id(
        self, mlmd_id: int, mlmd_parent_step_id: int
    ) -> UUID:
        """Resolves an artifact ID from MLMD to a ZenML artifact ID.

        Since a single MLMD artifact can map to multiple ZenML artifacts, we
        also need to know the parent step to resolve this correctly.

        Args:
            mlmd_id: The MLMD ID of the artifact.
            mlmd_parent_step_id: The MLMD ID of the parent step.

        Returns:
            The ZenML artifact ID.

        Raises:
            KeyError: if the artifact couldn't be found.
        """
        with Session(self.engine) as session:
            # Filter on both the artifact's MLMD ID and its parent step's
            # MLMD ID, since the former alone is not unique in ZenML.
            artifact_schema = session.exec(
                select(ArtifactSchema)
                .where(ArtifactSchema.mlmd_id == mlmd_id)
                .where(
                    ArtifactSchema.mlmd_parent_step_id == mlmd_parent_step_id
                )
            ).first()
            if artifact_schema is None:
                raise KeyError(
                    f"Unable to resolve MLMD artifact ID {mlmd_id}: "
                    f"No artifact with this ID found."
                )
            return artifact_schema.id

    def _sync_runs(self) -> None:
        """Sync runs from MLMD into the database.

        This queries all runs from MLMD, checks for each whether it already
        exists in the database, and if not, creates it.

        Two levels of locking protect the sync:
        1. A local thread lock (`self.sync_lock`) so only one thread in this
           process syncs at a time.
        2. For non-SQLite databases, a `SELECT ... FOR UPDATE` on the alembic
           version table, serializing syncs across server processes.
        Failures are logged, never raised, so a broken sync does not take
        down the calling request.
        """
        from zenml.zen_stores.migrations.alembic import AlembicVersion
        from zenml.zen_stores.rest_zen_store import DEFAULT_HTTP_TIMEOUT

        # This is to synchronize the locally running threads so that only
        # one thread attempts to sync the runs at any given time.
        # The timeout is set to be shorter than the default REST client
        # timeout, so that we don't block the client for too long.
        logger.debug(f"[{get_ident()}] Trying to acquire sync lock...")
        if not self.sync_lock.acquire(timeout=DEFAULT_HTTP_TIMEOUT - 10):
            # Skipping is safe: another thread is already syncing, and the
            # next call will pick up anything that was missed.
            logger.warning(
                f"[{get_ident()}] Timed out waiting to acquire pipeline "
                f"run sync lock. Skipping the sync this time around."
            )
            return

        logger.debug(f"[{get_ident()}] Pipeline run sync lock acquired.")
        try:
            with Session(self.engine) as session:
                logger.debug("Syncing pipeline runs...")
                if self.config.driver != SQLDatabaseDriver.SQLITE:
                    # This is to synchronize all server processes trying to
                    # sync the pipeline runs at the same time. We use the
                    # alembic version table as a shared resource that we can
                    # lock to prevent multiple processes from syncing runs
                    # at the same time.
                    session.query(AlembicVersion).with_for_update().all()
                self._sync_runs_with_lock(session)
                logger.debug("Pipeline runs sync complete")
        except Exception:
            # Syncing is best-effort; log the failure instead of propagating.
            logger.exception("Failed to sync pipeline runs.")
        finally:
            # Always release the thread lock, even if the sync failed.
            self.sync_lock.release()

    def _sync_runs_with_lock(self, session: Session) -> None:
        """Sync runs from MLMD into the database while the DB is locked.

        This queries all runs from MLMD, checks for each whether it already
        exists in the database, and if not, creates it.

        Args:
            session: The database session to use.
        """
        # Find all runs that already have an MLMD ID. These are already
        # synced and connected to MLMD, so we don't need to query them from
        # MLMD again.
        synced_mlmd_ids = session.exec(
            select(PipelineRunSchema.mlmd_id).where(
                isnot(PipelineRunSchema.mlmd_id, None)
            )
        ).all()
        logger.debug(f"Found {len(synced_mlmd_ids)} pipeline runs with MLMD ID")

        # Find all runs that have no MLMD ID. These might need to be
        # connected.
        runs_without_mlmd_id = session.exec(
            select(PipelineRunSchema).where(
                is_(PipelineRunSchema.mlmd_id, None)
            )
        ).all()
        logger.debug(
            f"Found {len(runs_without_mlmd_id)} pipeline runs without MLMD ID"
        )
        runs_without_mlmd_id_dict = {
            run_.name: run_ for run_ in runs_without_mlmd_id
        }

        # Sync all MLMD runs that don't exist in ZenML. For performance reasons,
        # we determine this by explicitly ignoring runs that are already synced.
        unsynced_mlmd_runs = self.metadata_store.get_all_runs(
            ignored_ids=[id_ for id_ in synced_mlmd_ids if id_ is not None]
        )
        logger.debug(
            f"Adding {len(unsynced_mlmd_runs)} new pipeline runs from MLMD"
        )
        for mlmd_run in unsynced_mlmd_runs:

            # If a run is written in both ZenML and MLMD but doesn't have an
            # MLMD ID set in the DB, we need to set it to connect the two.
            if mlmd_run.name in runs_without_mlmd_id_dict:
                run_model = runs_without_mlmd_id_dict[mlmd_run.name].to_model()
                run_model.mlmd_id = mlmd_run.mlmd_id
                try:
                    self.update_run(
                        run_id=run_model.id,
                        run_update=PipelineRunUpdateModel(
                            mlmd_id=mlmd_run.mlmd_id
                        ),
                    )
                except Exception as err:
                    logger.warning(
                        f"Syncing run '{mlmd_run.name}' failed: {str(err)}"
                    )
                    continue

            # Create runs that are in MLMD but not in the DB.
            else:
                try:
                    self._sync_run(mlmd_run)
                except EntityExistsError as exists_err:
                    logger.debug(
                        f"Run '{mlmd_run.name}' already exists: "
                        f"{str(exists_err)}. Skipping sync."
                    )
                    continue
                except Exception as err:
                    logger.warning(
                        f"Syncing run '{mlmd_run.name}' failed: {str(err)}"
                    )
                    continue

        # Sync steps and status of all unfinished runs.
        # We also filter out anything older than 1 week to prevent old broken
        # unfinished runs from being synced over and over again.
        unfinished_runs = session.exec(
            select(PipelineRunSchema)
            .where(PipelineRunSchema.status == ExecutionStatus.RUNNING)
            .where(
                PipelineRunSchema.updated >= datetime.now() - timedelta(weeks=1)
            )
        ).all()
        logger.debug(
            f"Updating {len(unfinished_runs)} unfinished pipeline runs from "
            "MLMD"
        )
        for run_ in unfinished_runs:
            try:
                logger.debug(f"Syncing run steps for pipeline run '{run_.id}'")
                self._sync_run_steps(run_.id)
                logger.debug(
                    f"Updating run status for pipeline run '{run_.id}'"
                )
                self._sync_run_status(run_.to_model())
            except Exception as err:
                logger.warning(f"Syncing run '{run_.name}' failed: {str(err)}")

        logger.debug("Pipeline runs sync complete.")

    def _sync_run(
        self, mlmd_run: "MLMDPipelineRunModel"
    ) -> PipelineRunResponseModel:
        """Write a single MLMD run into the database as a new pipeline run.

        Args:
            mlmd_run: The MLMD run model to sync.

        Returns:
            The synced run model.
        """
        # Legacy MLMD runs may carry no project/user info; fall back to the
        # server defaults in that case.
        project_id = mlmd_run.project or self._default_project.id
        user_id = mlmd_run.user or self._default_user.id
        run_request = PipelineRunRequestModel(
            name=mlmd_run.name,
            mlmd_id=mlmd_run.mlmd_id,
            project=project_id,
            user=user_id,
            stack=mlmd_run.stack_id,
            pipeline=mlmd_run.pipeline_id,
            pipeline_configuration=mlmd_run.pipeline_configuration,
            num_steps=mlmd_run.num_steps,
            # The real status is synced in a later pass; start as running.
            status=ExecutionStatus.RUNNING,
        )
        return self.create_run(run_request)

    def _sync_run_steps(self, run_id: UUID) -> None:
        """Sync run steps from MLMD into the database.

        Since we do not allow to create steps in the database directly, this is
        a one-way sync from MLMD to the database.

        For each step found in MLMD that is missing from the database, a new
        step run entry is created; steps that already exist are only refreshed
        (artifacts and status) at the end of the loop.

        Args:
            run_id: The ID of the pipeline run to sync steps for.

        Raises:
            KeyError: if the run couldn't be found.
        """
        with Session(self.engine) as session:
            run = session.exec(
                select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
            ).first()

            # If the run doesn't exist, raise an error.
            if run is None:
                raise KeyError(
                    f"Unable to sync run steps for run with ID {run_id}: "
                    f"No run with this ID found."
                )

            # If the run didn't come from MLMD, we can't sync artifacts.
            if run.mlmd_id is None:
                return

            # Get all steps that already exist in the database.
            zenml_steps = session.exec(
                select(StepRunSchema).where(
                    StepRunSchema.pipeline_run_id == run_id
                )
            ).all()
            zenml_step_dict = {step.name: step for step in zenml_steps}

        # Get all steps from MLMD.
        # NOTE(review): `run` and the step schemas are used after the session
        # above is closed — this appears to rely on the accessed attributes
        # already being loaded on the detached instances; confirm if the ORM
        # configuration changes.
        mlmd_steps = self.metadata_store.get_pipeline_run_steps(run.mlmd_id)

        # For each step in MLMD, sync it into ZenML if it doesn't exist yet.
        for step_name, mlmd_step in mlmd_steps.items():
            if step_name not in zenml_step_dict:
                try:
                    step_model = self._sync_run_step(
                        run_id=run_id, step_name=step_name, mlmd_step=mlmd_step
                    )
                except EntityExistsError as exists_err:
                    # The step appeared in the DB in the meantime (e.g. via a
                    # concurrent sync); skip it — a later pass refreshes it.
                    logger.debug(
                        f"Run step '{step_name}' of run {run.name} already "
                        f"exists: {str(exists_err)}. Skipping sync."
                    )
                    continue
            else:
                # The step already exists in the DB; convert the schema to a
                # model so its artifacts and status can be refreshed below.
                step_schema = zenml_step_dict[step_name]
                step_model = self._run_step_schema_to_model(step_schema)

            # Sync artifacts and status of all unfinished steps.
            self._sync_run_step_artifacts(step_model)
            self._sync_run_step_status(step_model)

    def _sync_run_step(
        self, run_id: UUID, step_name: str, mlmd_step: "MLMDStepRunModel"
    ) -> StepRunResponseModel:
        """Create a database entry for a single step run found in MLMD.

        Args:
            run_id: The ID of the pipeline run to sync the step for.
            step_name: The name of the step to sync.
            mlmd_step: The MLMD step model to sync.

        Returns:
            The synced run step model.
        """
        # Resolve every MLMD input artifact into its database artifact ID.
        mlmd_inputs = self.metadata_store.get_step_input_artifacts(
            step_id=mlmd_step.mlmd_id,
            step_parent_step_ids=mlmd_step.mlmd_parent_step_ids,
        )
        input_artifacts = {
            input_name: self._resolve_mlmd_artifact_id(
                mlmd_id=mlmd_artifact.mlmd_id,
                mlmd_parent_step_id=mlmd_artifact.mlmd_parent_step_id,
            )
            for input_name, mlmd_artifact in mlmd_inputs.items()
        }

        # Resolve the MLMD parent step IDs into database step IDs.
        parent_step_ids = [
            self._resolve_mlmd_step_id(parent_id)
            for parent_id in mlmd_step.mlmd_parent_step_ids
        ]

        step_request = StepRunRequestModel(
            name=step_name,
            mlmd_id=mlmd_step.mlmd_id,
            mlmd_parent_step_ids=mlmd_step.mlmd_parent_step_ids,
            entrypoint_name=mlmd_step.entrypoint_name,
            parameters=mlmd_step.parameters,
            step_configuration=mlmd_step.step_configuration,
            docstring=mlmd_step.docstring,
            num_outputs=mlmd_step.num_outputs,
            pipeline_run_id=run_id,
            parent_step_ids=parent_step_ids,
            input_artifacts=input_artifacts,
            # The real status is synced in a later pass; start as running.
            status=ExecutionStatus.RUNNING,
        )
        return self.create_run_step(step_request)

    def _sync_run_step_artifacts(
        self, step_model: StepRunResponseModel
    ) -> None:
        """Sync the output artifacts of a step run from MLMD into the database.

        Since we do not allow to create artifacts in the database directly,
        this is a one-way sync from MLMD to the database.

        Args:
            step_model: The model of the step run to sync artifacts for.
        """
        # Steps that were never registered in MLMD cannot be synced.
        if step_model.mlmd_id is None:
            return

        # Collect the output names that are already present in the database.
        with Session(self.engine) as session:
            existing_output_names = session.exec(
                select(ArtifactSchema.name).where(
                    ArtifactSchema.parent_step_id == step_model.id
                )
            ).all()

        # Fetch all output artifacts that MLMD knows about for this step.
        mlmd_outputs = self.metadata_store.get_step_output_artifacts(
            step_id=step_model.mlmd_id
        )

        # Sync every MLMD output that the database does not have yet.
        for output_name, mlmd_artifact in mlmd_outputs.items():
            if output_name in existing_output_names:
                continue
            try:
                self._sync_run_step_artifact(output_name, mlmd_artifact)
            except EntityExistsError as exists_err:
                # Created elsewhere in the meantime; nothing left to do.
                logger.debug(
                    f"Artifact {output_name} already exists: "
                    f"{str(exists_err)}. Skipping sync."
                )
                continue

    def _sync_run_step_artifact(
        self, output_name: str, mlmd_artifact: "MLMDArtifactModel"
    ) -> ArtifactResponseModel:
        """Create a database entry for a single step output artifact from MLMD.

        Args:
            output_name: The name of the output artifact.
            mlmd_artifact: The MLMD artifact model to sync.

        Returns:
            The synced artifact model.
        """
        # Translate the MLMD step references into database step IDs.
        parent_step_id = self._resolve_mlmd_step_id(
            mlmd_artifact.mlmd_parent_step_id
        )
        producer_step_id = self._resolve_mlmd_step_id(
            mlmd_artifact.mlmd_producer_step_id
        )
        artifact_request = ArtifactRequestModel(
            name=output_name,
            mlmd_id=mlmd_artifact.mlmd_id,
            type=mlmd_artifact.type,
            uri=mlmd_artifact.uri,
            materializer=mlmd_artifact.materializer,
            data_type=mlmd_artifact.data_type,
            mlmd_parent_step_id=mlmd_artifact.mlmd_parent_step_id,
            mlmd_producer_step_id=mlmd_artifact.mlmd_producer_step_id,
            is_cached=mlmd_artifact.is_cached,
            parent_step_id=parent_step_id,
            producer_step_id=producer_step_id,
        )
        return self.create_artifact(artifact_request)

    def _sync_run_step_status(
        self, step_model: StepRunResponseModel
    ) -> StepRunResponseModel:
        """Refresh the status of a step run from MLMD.

        In contrast to other update methods, this does not use the status of
        the model to overwrite the DB. Instead, the status is queried from
        MLMD.

        Args:
            step_model: The step run model to update.

        Returns:
            The step run model with updated status.
        """
        # Only steps that are still running and known to MLMD can be updated.
        if (
            step_model.status != ExecutionStatus.RUNNING
            or step_model.mlmd_id is None
        ):
            return step_model

        # The status may only be finalized once every expected output
        # artifact of the step has been synced into the database.
        all_synced = True
        num_outputs = step_model.num_outputs
        if num_outputs and num_outputs > 0:
            with Session(self.engine) as session:
                synced_outputs = session.exec(
                    select(ArtifactSchema).where(
                        ArtifactSchema.parent_step_id == step_model.id
                    )
                ).all()
            all_synced = len(synced_outputs) >= num_outputs

        # Query MLMD for the current status and persist it if the step has
        # reached a terminal state.
        status = self.metadata_store.get_step_status(step_model.mlmd_id)
        is_failed = status == ExecutionStatus.FAILED
        is_done = status in (ExecutionStatus.COMPLETED, ExecutionStatus.CACHED)
        if is_failed or (is_done and all_synced):
            self.update_run_step(
                step_id=step_model.id,
                step_update=StepRunUpdateModel(status=status),
            )

        return step_model

    def _sync_run_status(
        self, run_model: PipelineRunResponseModel
    ) -> PipelineRunResponseModel:
        """Refresh the status of a pipeline run from the status of its steps.

        In contrast to other update methods, this does not use the status of
        the model to overwrite the DB. Instead, the status is computed based
        on the status of each step, and if that is different from the status
        in the DB, the DB and model are both updated.

        Args:
            run_model: The pipeline run model to update.

        Returns:
            The pipeline run model with updated status.
        """
        # Finished runs never change status again.
        if run_model.status != ExecutionStatus.RUNNING:
            return run_model

        # Load the step schemas of this run from the database.
        with Session(self.engine) as session:
            step_schemas = session.exec(
                select(StepRunSchema).where(
                    StepRunSchema.pipeline_run_id == run_model.id
                )
            ).all()

        # The run may only be finalized once every expected step has been
        # synced into the database.
        all_synced = True
        num_steps = run_model.num_steps
        if num_steps and num_steps > 0 and len(step_schemas) < num_steps:
            all_synced = False

        # Aggregate the step statuses into a run status and persist it if the
        # run has reached a terminal state.
        status = ExecutionStatus.run_status(
            [step.status for step in step_schemas]
        )
        is_failed = status == ExecutionStatus.FAILED
        is_done = status in (ExecutionStatus.COMPLETED, ExecutionStatus.CACHED)
        if is_failed or (is_done and all_synced):
            self.update_run(
                run_id=run_model.id,
                run_update=PipelineRunUpdateModel(status=status),
            )

        return run_model
active_user_name: str property readonly

Gets the active username.

Returns:

Type Description
str

The active username.

alembic: Alembic property readonly

The Alembic wrapper.

Returns:

Type Description
Alembic

The Alembic wrapper.

Exceptions:

Type Description
ValueError

If the store is not initialized.

engine: Engine property readonly

The SQLAlchemy engine.

Returns:

Type Description
Engine

The SQLAlchemy engine.

Exceptions:

Type Description
ValueError

If the store is not initialized.

metadata_store: MetadataStore property readonly

The metadata store.

Returns:

Type Description
MetadataStore

The metadata store.

Exceptions:

Type Description
ValueError

If the store is not initialized.

runs_inside_server: bool property readonly

Whether the store is running inside a server.

Returns:

Type Description
bool

Whether the store is running inside a server.

sync_lock: thread lock property readonly

The mutex used to synchronize pipeline runs.

Returns:

Type Description
thread lock

The mutex used to synchronize pipeline runs.

Exceptions:

Type Description
ValueError

If the store is not initialized.

CONFIG_TYPE (StoreConfiguration) pydantic-model

SQL ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

driver Optional[zenml.zen_stores.sql_zen_store.SQLDatabaseDriver]

The SQL database driver.

database Optional[str]

database name. If not already present on the server, it will be created automatically on first access.

username Optional[str]

The database username.

password Optional[str]

The database password.

ssl_ca Optional[str]

certificate authority certificate. Required for SSL enabled authentication if the CA certificate is not part of the certificates shipped by the operating system.

ssl_cert Optional[str]

client certificate. Required for SSL enabled authentication if client certificates are used.

ssl_key Optional[str]

client certificate private key. Required for SSL enabled authentication if client certificates are used.

ssl_verify_server_cert bool

set to verify the identity of the server against the provided server certificate.

pool_size int

The maximum number of connections to keep in the SQLAlchemy pool.

max_overflow int

The maximum number of connections to allow in the SQLAlchemy pool in addition to the pool_size.

grpc_metadata_host Optional[str]

The host to use for the gRPC metadata server.

grpc_metadata_port Optional[int]

The port to use for the gRPC metadata server.

grpc_metadata_ssl_ca Optional[str]

The certificate authority certificate to use for the gRPC metadata server connection.

grpc_metadata_ssl_cert Optional[str]

The client certificate to use for the gRPC metadata server connection.

grpc_metadata_ssl_key Optional[str]

The client certificate private key to use for the gRPC metadata server connection.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStoreConfiguration(StoreConfiguration):
    """SQL ZenML store configuration.

    Attributes:
        type: The type of the store.
        driver: The SQL database driver.
        database: database name. If not already present on the server, it will
            be created automatically on first access.
        username: The database username.
        password: The database password.
        ssl_ca: certificate authority certificate. Required for SSL
            enabled authentication if the CA certificate is not part of the
            certificates shipped by the operating system.
        ssl_cert: client certificate. Required for SSL enabled
            authentication if client certificates are used.
        ssl_key: client certificate private key. Required for SSL
            enabled authentication if client certificates are used.
        ssl_verify_server_cert: set to verify the identity of the server
            against the provided server certificate.
        pool_size: The maximum number of connections to keep in the SQLAlchemy
            pool.
        max_overflow: The maximum number of connections to allow in the
            SQLAlchemy pool in addition to the pool_size.
        grpc_metadata_host: The host to use for the gRPC metadata server.
        grpc_metadata_port: The port to use for the gRPC metadata server.
        grpc_metadata_ssl_ca: The certificate authority certificate to use for
            the gRPC metadata server connection.
        grpc_metadata_ssl_cert: The client certificate to use for the gRPC
            metadata server connection.
        grpc_metadata_ssl_key: The client certificate private key to use for
            the gRPC metadata server connection.
    """

    type: StoreType = StoreType.SQL

    driver: Optional[SQLDatabaseDriver] = None
    database: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    ssl_ca: Optional[str] = None
    ssl_cert: Optional[str] = None
    ssl_key: Optional[str] = None
    ssl_verify_server_cert: bool = False
    pool_size: int = 20
    max_overflow: int = 20

    grpc_metadata_host: Optional[str] = None
    grpc_metadata_port: Optional[int] = None
    grpc_metadata_ssl_ca: Optional[str] = None
    grpc_metadata_ssl_key: Optional[str] = None
    grpc_metadata_ssl_cert: Optional[str] = None

    @root_validator
    def _validate_url(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the SQL URL.

        The validator also moves the MySQL username, password and database
        parameters from the URL into the other configuration arguments, if they
        are present in the URL.

        Args:
            values: The values to validate.

        Returns:
            The validated values.

        Raises:
            ValueError: If the URL is invalid or the SQL driver is not
                supported.
        """
        # flake8: noqa: C901
        url = values.get("url")
        if url is None:
            return values

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        try:
            sql_url = make_url(url)
        except ArgumentError as e:
            # BUGFIX: these messages were previously built by passing
            # printf-style arguments directly to `ValueError`, which never
            # interpolates them — the user saw the raw tuple. Use f-strings.
            raise ValueError(
                f"Invalid SQL URL `{url}`: {str(e)}. The URL must be in the "
                f"format `driver://[[username:password@]hostname:port]"
                f"/database[?<extra-args>]`."
            ) from e

        if sql_url.drivername not in SQLDatabaseDriver.values():
            raise ValueError(
                f"Invalid SQL driver value `{url}`: The driver must be one "
                f"of: {', '.join(SQLDatabaseDriver.values())}."
            )
        values["driver"] = SQLDatabaseDriver(sql_url.drivername)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            if (
                sql_url.username
                or sql_url.password
                or sql_url.query
                or sql_url.database is None
            ):
                raise ValueError(
                    f"Invalid SQLite URL `{url}`: The URL must be in the "
                    f"format `sqlite:///path/to/database.db`."
                )
            if values.get("username") or values.get("password"):
                raise ValueError(
                    "Invalid SQLite configuration: The username and password "
                    "must not be set"
                )
            values["database"] = sql_url.database
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # Move the credentials and database out of the URL and into the
            # dedicated configuration attributes.
            if sql_url.username:
                values["username"] = sql_url.username
                sql_url = sql_url._replace(username=None)
            if sql_url.password:
                values["password"] = sql_url.password
                sql_url = sql_url._replace(password=None)
            if sql_url.database:
                values["database"] = sql_url.database
                sql_url = sql_url._replace(database=None)
            if sql_url.query:
                # Only SSL-related query parameters are recognized; they are
                # moved into the corresponding configuration attributes.
                for k, v in sql_url.query.items():
                    if k == "ssl_ca":
                        values["ssl_ca"] = v
                    elif k == "ssl_cert":
                        values["ssl_cert"] = v
                    elif k == "ssl_key":
                        values["ssl_key"] = v
                    elif k == "ssl_verify_server_cert":
                        values["ssl_verify_server_cert"] = v
                    else:
                        raise ValueError(
                            f"Invalid MySQL URL query parameter `{k}`: The "
                            f"parameter must be one of: ssl_ca, ssl_cert, "
                            f"ssl_key, or ssl_verify_server_cert."
                        )
                sql_url = sql_url._replace(query={})

            database = values.get("database")
            if (
                not values.get("username")
                or not values.get("password")
                or not database
            ):
                raise ValueError(
                    "Invalid MySQL configuration: The username, password and "
                    "database must be set in the URL or as configuration "
                    "attributes"
                )

            regexp = r"^[^\\/?%*:|\"<>.-]{1,64}$"
            match = re.match(regexp, database)
            if not match:
                raise ValueError(
                    f"The database name does not conform to the required "
                    f"format "
                    f"rules ({regexp}): {database}"
                )

            # Save the certificates in a secure location on disk
            secret_folder = Path(
                GlobalConfiguration().local_stores_path,
                "certificates",
            )
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                content = values.get(key)
                if content and not os.path.isfile(content):
                    fileio.makedirs(str(secret_folder))
                    file_path = Path(secret_folder, f"{key}.pem")
                    with open(file_path, "w") as f:
                        f.write(content)
                    # Restrict access to the owner only: the file contains a
                    # private key / certificate material.
                    file_path.chmod(0o600)
                    values[key] = str(file_path)

        values["url"] = str(sql_url)
        return values

    @staticmethod
    def get_local_url(path: str) -> str:
        """Get a local SQL url for a given local path.

        Args:
            path: The path to the local sqlite file.

        Returns:
            The local SQL url for the given path.
        """
        return f"sqlite:///{path}/{ZENML_SQLITE_DB_FILENAME}"

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return make_url(url).drivername in SQLDatabaseDriver.values()

    def expand_certificates(self) -> None:
        """Expands the certificates in the verify_ssl field."""
        # Load the certificate values back into the configuration
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            file_path = getattr(self, key, None)
            if file_path and os.path.isfile(file_path):
                with open(file_path, "r") as f:
                    setattr(self, key, f.read())

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Copy the store config using a different configuration path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, SqlZenStoreConfiguration)
        config = config.copy()

        if config.driver == SQLDatabaseDriver.MYSQL:
            # Load the certificate values back into the configuration
            config.expand_certificates()

        elif config.driver == SQLDatabaseDriver.SQLITE:
            if load_config_path:
                config.url = cls.get_local_url(str(load_config_path))
            else:
                config.url = cls.get_local_url(config_path)

        return config

    def get_metadata_config(
        self, expand_certs: bool = False
    ) -> "ConnectionConfig":
        """Get the metadata configuration for the SQL ZenML store.

        Args:
            expand_certs: Whether to expand the certificate paths to their
                contents.

        Returns:
            The metadata configuration.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        from ml_metadata.proto.metadata_store_pb2 import MySQLDatabaseConfig
        from tfx.orchestration import metadata

        sql_url = make_url(self.url)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            mlmd_config = metadata.sqlite_metadata_connection_config(
                self.database
            )
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            mlmd_config = metadata.mysql_metadata_connection_config(
                host=sql_url.host,
                port=sql_url.port or 3306,
                database=self.database,
                username=self.username,
                password=self.password,
            )

            mlmd_ssl_options = {}
            # Handle certificate params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if expand_certs and os.path.isfile(ssl_setting):
                    with open(ssl_setting, "r") as f:
                        ssl_setting = f.read()
                # BUGFIX: strip the "ssl_" prefix by slicing. `str.lstrip`
                # removes a *character set*, not a prefix, and only worked
                # here by accident for these three key names.
                mlmd_ssl_options[key[len("ssl_"):]] = ssl_setting

            # Handle additional params
            if mlmd_ssl_options:
                mlmd_ssl_options[
                    "verify_server_cert"
                ] = self.ssl_verify_server_cert
                mlmd_config.mysql.ssl_options.CopyFrom(
                    MySQLDatabaseConfig.SSLOptions(**mlmd_ssl_options)
                )
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return mlmd_config

    def get_sqlmodel_config(self) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
        """Get the SQLModel engine configuration for the SQL ZenML store.

        Returns:
            The URL and connection arguments for the SQLModel engine.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        sql_url = make_url(self.url)
        sqlalchemy_connect_args: Dict[str, Any] = {}
        engine_args = {}
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            # The following default value is needed for sqlite to avoid the
            # Error:
            #   sqlite3.ProgrammingError: SQLite objects created in a thread can
            #   only be used in that same thread.
            sqlalchemy_connect_args = {"check_same_thread": False}
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            engine_args = {
                "pool_size": self.pool_size,
                "max_overflow": self.max_overflow,
            }

            sql_url = sql_url._replace(
                drivername="mysql+pymysql",
                username=self.username,
                password=self.password,
                database=self.database,
            )

            sqlalchemy_ssl_args: Dict[str, Any] = {}

            # Handle SSL params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if not os.path.isfile(ssl_setting):
                    logger.warning(
                        f"Database SSL setting `{key}` is not a file. "
                    )
                # BUGFIX: strip the "ssl_" prefix by slicing instead of
                # `lstrip`, which removes a character set, not a prefix.
                sqlalchemy_ssl_args[key[len("ssl_"):]] = ssl_setting
            if len(sqlalchemy_ssl_args) > 0:
                sqlalchemy_ssl_args[
                    "check_hostname"
                ] = self.ssl_verify_server_cert
                sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return str(sql_url), sqlalchemy_connect_args, engine_args

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/sql_zen_store.py
class Config:
    """Pydantic configuration class."""

    # Reject any attribute that is not declared on the model.
    extra = "forbid"
    # Assignment validation is deliberately disabled: the certificate
    # path attributes may later be overwritten in-place with the
    # certificate file contents, and re-validation must not get in the
    # way of that.
    validate_assignment = False
copy_configuration(config, config_path, load_config_path=None) classmethod

Copy the store config using a different configuration path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Copy the store config using a different configuration path.

    Creates a copy of the store configuration that can be loaded using a
    different configuration path or in the context of a new environment,
    such as a container image. Configuration files accompanying the store
    configuration (e.g. certificates) are carried along into the copy.

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the
            copied configuration. Set this to a value different from
            `config_path` when the copy will be loaded from another
            environment (e.g. a container image) under a different
            absolute path; it is reflected in the paths and URLs encoded
            in the copied configuration.

    Returns:
        A new store configuration object that reflects the new
        configuration path.
    """
    assert isinstance(config, SqlZenStoreConfiguration)
    # Work on a copy so the caller's configuration object stays untouched.
    config = config.copy()

    if config.driver == SQLDatabaseDriver.MYSQL:
        # Inline the certificate file contents so the copy is
        # self-contained and no longer depends on the local filesystem.
        config.expand_certificates()
    elif config.driver == SQLDatabaseDriver.SQLITE:
        # Re-point the sqlite URL at the (possibly remapped) config path.
        effective_path = (
            str(load_config_path) if load_config_path else config_path
        )
        config.url = cls.get_local_url(effective_path)

    return config
expand_certificates(self)

Expands the SSL certificate attributes (ssl_key, ssl_ca, ssl_cert) to the contents of the certificate files.

Source code in zenml/zen_stores/sql_zen_store.py
def expand_certificates(self) -> None:
    """Expand the SSL certificate attributes to their file contents.

    For each of the `ssl_key`, `ssl_ca` and `ssl_cert` attributes: if the
    attribute currently holds a path to an existing file, it is replaced
    in-place with the contents of that file. Attributes that are unset or
    do not point to an existing file are left untouched.
    """
    for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
        file_path = getattr(self, key, None)
        if file_path and os.path.isfile(file_path):
            # Replace the path with the certificate contents so the
            # configuration no longer depends on the local filesystem.
            with open(file_path, "r") as f:
                setattr(self, key, f.read())
get_local_url(path) staticmethod

Get a local SQL url for a given local path.

Parameters:

Name Type Description Default
path str

The path to the local sqlite file.

required

Returns:

Type Description
str

The local SQL url for the given path.

Source code in zenml/zen_stores/sql_zen_store.py
@staticmethod
def get_local_url(path: str) -> str:
    """Get a local SQL url for a given local path.

    Args:
        path: The path to the local sqlite file.

    Returns:
        The local SQL url for the given path.
    """
    # Compose a sqlite URL pointing at the ZenML DB file inside `path`.
    return "sqlite:///{}/{}".format(path, ZENML_SQLITE_DB_FILENAME)
get_metadata_config(self, expand_certs=False)

Get the metadata configuration for the SQL ZenML store.

Parameters:

Name Type Description Default
expand_certs bool

Whether to expand the certificate paths to their contents.

False

Returns:

Type Description
ConnectionConfig

The metadata configuration.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_metadata_config(
    self, expand_certs: bool = False
) -> "ConnectionConfig":
    """Get the metadata configuration for the SQL ZenML store.

    Args:
        expand_certs: Whether to expand the certificate paths to their
            contents.

    Returns:
        The metadata configuration.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    from ml_metadata.proto.metadata_store_pb2 import MySQLDatabaseConfig
    from tfx.orchestration import metadata

    sql_url = make_url(self.url)
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        assert self.database is not None
        mlmd_config = metadata.sqlite_metadata_connection_config(
            self.database
        )
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        mlmd_config = metadata.mysql_metadata_connection_config(
            host=sql_url.host,
            port=sql_url.port or 3306,
            database=self.database,
            username=self.username,
            password=self.password,
        )

        mlmd_ssl_options = {}
        # Handle certificate params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if expand_certs and os.path.isfile(ssl_setting):
                with open(ssl_setting, "r") as f:
                    ssl_setting = f.read()
            # Strip the "ssl_" prefix via slicing. `str.lstrip("ssl_")`
            # would be wrong here: it removes a *character set* from the
            # left, not a prefix, and only worked for these keys by
            # coincidence.
            mlmd_ssl_options[key[len("ssl_"):]] = ssl_setting

        # Handle additional params
        if mlmd_ssl_options:
            mlmd_ssl_options[
                "verify_server_cert"
            ] = self.ssl_verify_server_cert
            mlmd_config.mysql.ssl_options.CopyFrom(
                MySQLDatabaseConfig.SSLOptions(**mlmd_ssl_options)
            )
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return mlmd_config
get_sqlmodel_config(self)

Get the SQLModel engine configuration for the SQL ZenML store.

Returns:

Type Description
Tuple[str, Dict[str, Any], Dict[str, Any]]

The URL and connection arguments for the SQLModel engine.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_sqlmodel_config(self) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
    """Get the SQLModel engine configuration for the SQL ZenML store.

    Returns:
        The URL and connection arguments for the SQLModel engine.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    sql_url = make_url(self.url)
    sqlalchemy_connect_args: Dict[str, Any] = {}
    engine_args = {}
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        assert self.database is not None
        # The following default value is needed for sqlite to avoid the
        # Error:
        #   sqlite3.ProgrammingError: SQLite objects created in a thread can
        #   only be used in that same thread.
        sqlalchemy_connect_args = {"check_same_thread": False}
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        engine_args = {
            "pool_size": self.pool_size,
            "max_overflow": self.max_overflow,
        }

        sql_url = sql_url._replace(
            drivername="mysql+pymysql",
            username=self.username,
            password=self.password,
            database=self.database,
        )

        sqlalchemy_ssl_args: Dict[str, Any] = {}

        # Handle SSL params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if not os.path.isfile(ssl_setting):
                logger.warning(
                    f"Database SSL setting `{key}` is not a file. "
                )
            # Strip the "ssl_" prefix via slicing. `str.lstrip("ssl_")`
            # would be wrong here: it removes a *character set* from the
            # left, not a prefix, and only worked for these keys by
            # coincidence.
            sqlalchemy_ssl_args[key[len("ssl_"):]] = ssl_setting
        if sqlalchemy_ssl_args:
            sqlalchemy_ssl_args[
                "check_hostname"
            ] = self.ssl_verify_server_cert
            sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return str(sql_url), sqlalchemy_connect_args, engine_args
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Check if a URL scheme is supported by this store.

    Args:
        url: The URL to check.

    Returns:
        True if the URL scheme is supported, False otherwise.
    """
    # The store handles exactly the drivers enumerated in
    # SQLDatabaseDriver.
    scheme = make_url(url).drivername
    return scheme in SQLDatabaseDriver.values()
create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Exceptions:

Type Description
KeyError

if the parent step doesn't exist.

EntityExistsError

if the artifact already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.

    Raises:
        KeyError: if the parent step doesn't exist.
        EntityExistsError: if the artifact already exists.
    """
    with Session(self.engine) as session:
        # The parent step must already exist in the database.
        parent_step = session.exec(
            select(StepRunSchema).where(
                StepRunSchema.id == artifact.parent_step_id
            )
        ).first()
        if parent_step is None:
            raise KeyError(
                f"Unable to create artifact: Could not find parent step "
                f"with ID '{artifact.parent_step_id}'."
            )

        # Guard against a duplicate MLMD artifact for the same parent
        # step.
        if artifact.mlmd_id is not None:
            duplicate = session.exec(
                select(ArtifactSchema)
                .where(ArtifactSchema.mlmd_id == artifact.mlmd_id)
                .where(
                    ArtifactSchema.mlmd_parent_step_id
                    == artifact.mlmd_parent_step_id
                )
            ).first()
            if duplicate is not None:
                raise EntityExistsError(
                    f"Unable to create artifact: An artifact with MLMD ID "
                    f"'{artifact.mlmd_id}' already exists as output of "
                    f"step '{artifact.mlmd_parent_step_id}'."
                )

        # Persist the new artifact and return its domain model.
        schema = ArtifactSchema.from_request(artifact)
        session.add(schema)
        session.commit()
        return schema.to_model()
create_flavor(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_role_assignment(self, role_assignment)

Assigns a role to a user or team, scoped to a specific project.

Parameters:

Name Type Description Default
role_assignment RoleAssignmentRequestModel

The role assignment to create.

required

Returns:

Type Description
RoleAssignmentResponseModel

The created role assignment.

Exceptions:

Type Description
ValueError

If neither a user nor a team is specified.

Source code in zenml/zen_stores/sql_zen_store.py
def create_role_assignment(
    self, role_assignment: RoleAssignmentRequestModel
) -> RoleAssignmentResponseModel:
    """Assigns a role to a user or team, scoped to a specific project.

    Args:
        role_assignment: The role assignment to create.

    Returns:
        The created role assignment.

    Raises:
        ValueError: If neither a user nor a team is specified.
    """
    # A role assignment targets exactly one of: a user or a team.
    if role_assignment.user:
        return self._assign_role_to_user(
            role_name_or_id=role_assignment.role,
            user_name_or_id=role_assignment.user,
            project_name_or_id=role_assignment.project,
        )
    elif role_assignment.team:
        return self._assign_role_to_team(
            role_name_or_id=role_assignment.role,
            team_name_or_id=role_assignment.team,
            project_name_or_id=role_assignment.project,
        )
    else:
        raise ValueError(
            "Role assignment must be assigned to either a user or a team."
        )
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Exceptions:

Type Description
EntityExistsError

If an identical pipeline run already exists.

Source code in zenml/zen_stores/sql_zen_store.py
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.

    Raises:
        EntityExistsError: If an identical pipeline run already exists.
    """
    with Session(self.engine) as session:

        # Check if pipeline run with same name already exists.
        existing_domain_run = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.name == pipeline_run.name
            )
        ).first()
        if existing_domain_run is not None:
            raise EntityExistsError(
                "Unable to create pipeline run: A pipeline run with name "
                f"'{pipeline_run.name}' already exists."
            )

        # Check if pipeline run with same ID already exists.
        existing_id_run = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.id == pipeline_run.id
            )
        ).first()
        if existing_id_run is not None:
            raise EntityExistsError(
                "Unable to create pipeline run: A pipeline run with ID "
                f"'{pipeline_run.id}' already exists."
            )

        # Check if pipeline run with same name MLMD ID already exists.
        if pipeline_run.mlmd_id is not None:
            existing_mlmd_id_run = session.exec(
                select(PipelineRunSchema).where(
                    PipelineRunSchema.mlmd_id == pipeline_run.mlmd_id
                )
            ).first()
            if existing_mlmd_id_run is not None:
                raise EntityExistsError(
                    "Unable to create pipeline run: A pipeline run with "
                    f"MLMD ID '{pipeline_run.mlmd_id}' already exists."
                )

        # Query stack to ensure it exists in the DB. A missing stack only
        # downgrades to a warning so the run can still be recorded.
        stack_id = None
        if pipeline_run.stack is not None:
            stack_id = session.exec(
                select(StackSchema.id).where(
                    StackSchema.id == pipeline_run.stack
                )
            ).first()
            if stack_id is None:
                logger.warning(
                    "No stack found for this run. "
                    f"Creating pipeline run '{pipeline_run.name}' without "
                    "linked stack."
                )

        # Query pipeline to ensure it exists in the DB. A missing pipeline
        # likewise only downgrades to a warning (unlisted run).
        pipeline_id = None
        if pipeline_run.pipeline is not None:
            pipeline_id = session.exec(
                select(PipelineSchema.id).where(
                    PipelineSchema.id == pipeline_run.pipeline
                )
            ).first()
            if pipeline_id is None:
                logger.warning(
                    "No pipeline found. Creating pipeline run "
                    f"'{pipeline_run.name}' as unlisted run."
                )

        # The pipeline configuration is stored as a JSON string.
        configuration = json.dumps(pipeline_run.pipeline_configuration)

        new_run = PipelineRunSchema(
            id=pipeline_run.id,
            name=pipeline_run.name,
            orchestrator_run_id=pipeline_run.orchestrator_run_id,
            stack_id=stack_id,
            project_id=pipeline_run.project,
            user_id=pipeline_run.user,
            pipeline_id=pipeline_id,
            status=pipeline_run.status,
            pipeline_configuration=configuration,
            num_steps=pipeline_run.num_steps,
            git_sha=pipeline_run.git_sha,
            zenml_version=pipeline_run.zenml_version,
            mlmd_id=pipeline_run.mlmd_id,
        )

        # Create the pipeline run
        session.add(new_run)
        session.commit()

        return new_run.to_model()
create_run_step(self, step)

Creates a step.

Parameters:

Name Type Description Default
step StepRunRequestModel

The step to create.

required

Returns:

Type Description
StepRunResponseModel

The created step.

Exceptions:

Type Description
EntityExistsError

if the step already exists.

KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def create_run_step(
    self, step: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step.

    Args:
        step: The step to create.

    Returns:
        The created step.

    Raises:
        EntityExistsError: if the step already exists.
        KeyError: if the pipeline run doesn't exist.
    """
    with Session(self.engine) as session:

        # Check if the step already exists (MLMD IDs are globally unique
        # per step run, so a match anywhere is a duplicate).
        if step.mlmd_id is not None:
            existing_step = session.exec(
                select(StepRunSchema).where(
                    StepRunSchema.mlmd_id == step.mlmd_id
                )
            ).first()
            if existing_step is not None:
                raise EntityExistsError(
                    f"Unable to create step '{step.name}': A step with "
                    f"MLMD ID '{step.mlmd_id}' already exists."
                )

        # Check if the pipeline run exists
        run = session.exec(
            select(PipelineRunSchema).where(
                PipelineRunSchema.id == step.pipeline_run_id
            )
        ).first()
        if run is None:
            raise KeyError(
                f"Unable to create step '{step.name}': No pipeline run "
                f"with ID '{step.pipeline_run_id}' found."
            )

        # Check if the step name already exists in the pipeline run
        # (step names are unique within a run, independent of MLMD IDs).
        existing_step = session.exec(
            select(StepRunSchema)
            .where(StepRunSchema.name == step.name)
            .where(StepRunSchema.pipeline_run_id == step.pipeline_run_id)
        ).first()
        if existing_step is not None:
            raise EntityExistsError(
                f"Unable to create step '{step.name}': A step with this "
                f"name already exists in the pipeline run with ID "
                f"'{step.pipeline_run_id}'."
            )

        # Create the step
        step_schema = StepRunSchema.from_request(step)
        session.add(step_schema)
        session.commit()

        # NOTE(review): the step row is committed before the relations
        # below are linked; the helper methods presumably manage their
        # own persistence -- confirm against their definitions.

        # Save parent step IDs into the database.
        for parent_step_id in step.parent_step_ids:
            self._set_run_step_parent_step(
                child_id=step_schema.id, parent_id=parent_step_id
            )

        # Save input artifact IDs into the database.
        for input_name, artifact_id in step.input_artifacts.items():
            self._set_run_step_input_artifact(
                step_id=step_schema.id,
                artifact_id=artifact_id,
                name=input_name,
            )

        return step_schema.to_model(
            parent_step_ids=step.parent_step_ids,
            mlmd_parent_step_ids=step.mlmd_parent_step_ids,
            input_artifacts=step.input_artifacts,
        )
create_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
create_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Run the wrapped function, then emit an analytics event.

    Args:
        *args: Positional arguments forwarded to the wrapped function.
        **kwargs: Keyword arguments forwarded to the wrapped function.

    Returns:
        Result of the wrapped function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is bound to an analytics tracker, route
        # the event through it instead of the global tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first analytics-aware model found in
        # the result or the call arguments.
        candidates = [result, *args, *kwargs.values()]
        tracked_model = next(
            (
                obj
                for obj in candidates
                if isinstance(obj, AnalyticsTrackedModelMixin)
            ),
            None,
        )
        if tracked_model is not None:
            tracked_model.track_event(event_name, tracker=tracker)
        elif tracker:
            tracker.track_event(event_name, metadata)
        else:
            track_event(event_name, metadata)

    except Exception as e:
        # Analytics failures must never break the wrapped call.
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_flavor(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_role_assignment(self, role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
role_assignment_id UUID

The ID of the specific role assignment.

required

Exceptions:

Type Description
KeyError

If the role assignment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def delete_role_assignment(self, role_assignment_id: UUID) -> None:
    """Delete a specific role assignment.

    Args:
        role_assignment_id: The ID of the specific role assignment.

    Raises:
        KeyError: If the role assignment does not exist.
    """
    with Session(self.engine) as session:
        # A role assignment with this ID may live in either the user or
        # the team assignment table; look it up in both.
        user_role = session.exec(
            select(UserRoleAssignmentSchema).where(
                UserRoleAssignmentSchema.id == role_assignment_id
            )
        ).one_or_none()
        team_role = session.exec(
            select(TeamRoleAssignmentSchema).where(
                TeamRoleAssignmentSchema.id == role_assignment_id
            )
        ).one_or_none()

        if user_role is None and team_role is None:
            raise KeyError(
                f"RoleAssignment with ID {role_assignment_id} not found."
            )

        # Delete whichever rows matched and persist in one commit.
        if user_role is not None:
            session.delete(user_role)
        if team_role is not None:
            session.delete(team_role)
        session.commit()
delete_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
delete_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Runs the wrapped function, then emits an analytics event for it on a
    best-effort basis: any tracking failure is logged at debug level and
    never propagated to the caller.

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route the event through that tracker.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if args and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer event metadata from the first tracked model found among
        # the result and the call arguments. The `else` branch is the
        # for/else fallback: it runs only when no tracked model is found.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
get_auth_user(self, user_name_or_id)

Gets the auth model for a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserAuthModel

The requested user, if it was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_auth_user(self, user_name_or_id: Union[str, UUID]) -> UserAuthModel:
    """Gets the authentication model for a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
    with Session(self.engine) as session:
        user_schema = self._get_user_schema(user_name_or_id, session=session)
        # Build the auth model explicitly field by field, including the
        # credential fields (password hash, activation token).
        return UserAuthModel(
            id=user_schema.id,
            name=user_schema.name,
            full_name=user_schema.full_name,
            email_opted_in=user_schema.email_opted_in,
            active=user_schema.active,
            created=user_schema.created,
            updated=user_schema.updated,
            password=user_schema.password,
            activation_token=user_schema.activation_token,
        )
get_flavor(self, flavor_id)

Get a flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the flavor to fetch.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Get a flavor by ID.

    Args:
        flavor_id: The ID of the flavor to fetch.

    Returns:
        The stack component flavor.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
    with Session(self.engine) as session:
        query = select(FlavorSchema).where(FlavorSchema.id == flavor_id)
        flavor_schema = session.exec(query).first()
        if flavor_schema is None:
            raise KeyError(f"Flavor with ID {flavor_id} not found.")
        return flavor_schema.to_model()
get_metadata_config(self, expand_certs=False)

Get the TFX metadata config of this ZenStore.

Parameters:

Name Type Description Default
expand_certs bool

Whether to expand the certificate paths in the connection config to their value.

False

Returns:

Type Description
Union[ConnectionConfig, MetadataStoreClientConfig]

The TFX metadata config of this ZenStore.

Source code in zenml/zen_stores/sql_zen_store.py
def get_metadata_config(
    self, expand_certs: bool = False
) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
    """Get the TFX metadata config of this ZenStore.

    Args:
        expand_certs: Whether to expand the certificate paths in the
            connection config to their value.

    Returns:
        The TFX metadata config of this ZenStore.
    """
    from ml_metadata.proto.metadata_store_pb2 import (
        MetadataStoreClientConfig,
    )

    grpc_host = self.config.grpc_metadata_host
    if not grpc_host:
        # No gRPC metadata service configured: hand out the direct SQL
        # connection config instead.
        return self.config.get_metadata_config(expand_certs=expand_certs)

    # A gRPC metadata store connection is configured, so advertise it to
    # the client instead of the direct SQL connection config.
    client_config = MetadataStoreClientConfig()
    client_config.host = grpc_host
    client_config.port = self.config.grpc_metadata_port
    ssl_config = client_config.ssl_config
    if self.config.grpc_metadata_ssl_ca:
        ssl_config.custom_ca = self.config.grpc_metadata_ssl_ca
    if self.config.grpc_metadata_ssl_cert:
        ssl_config.server_cert = self.config.grpc_metadata_ssl_cert
    if self.config.grpc_metadata_ssl_key:
        ssl_config.client_key = self.config.grpc_metadata_ssl_key

    return client_config
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/sql_zen_store.py
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run.
    """
    # We want to have the 'create' statement in the try block since running
    # it first will reduce concurrency issues: attempting the insert and
    # handling the conflict avoids the check-then-create race that a
    # "get first, create on miss" ordering would have.
    try:
        return self.create_run(pipeline_run)
    except EntityExistsError:
        # Currently, an `EntityExistsError` is raised if either the run ID
        # or the run name already exists. Therefore, we need to have another
        # try block since getting the run by ID might still fail.
        try:
            return self.get_run(pipeline_run.id)
        except KeyError:
            # The ID lookup missed, so the name must be the conflicting
            # field; fetch by name instead.
            return self.get_run(pipeline_run.name)
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Exceptions:

Type Description
KeyError

if the pipeline does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Get a pipeline with a given ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.

    Raises:
        KeyError: if the pipeline does not exist.
    """
    with Session(self.engine) as session:
        query = select(PipelineSchema).where(
            PipelineSchema.id == pipeline_id
        )
        pipeline_schema = session.exec(query).first()
        if pipeline_schema is None:
            raise KeyError(
                f"Unable to get pipeline with ID '{pipeline_id}': "
                "No pipeline with this ID found."
            )
        return pipeline_schema.to_model()
get_project(self, project_name_or_id)

Get an existing project by name or ID.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

Name or ID of the project to get.

required

Returns:

Type Description
ProjectResponseModel

The requested project if one was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_project(
    self, project_name_or_id: Union[str, UUID]
) -> ProjectResponseModel:
    """Get an existing project by name or ID.

    Args:
        project_name_or_id: Name or ID of the project to get.

    Returns:
        The requested project if one was found.
    """
    with Session(self.engine) as session:
        project_schema = self._get_project_schema(
            project_name_or_id, session=session
        )
    # NOTE(review): the conversion runs after the session has closed,
    # unlike most getters in this store — presumably the schema row is
    # fully loaded by then; verify before relying on lazy attributes.
    return project_schema.to_model()
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Source code in zenml/zen_stores/sql_zen_store.py
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Gets a specific role.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.
    """
    with Session(self.engine) as session:
        role_schema = self._get_role_schema(role_name_or_id, session=session)
        return role_schema.to_model()
get_role_assignment(self, role_assignment_id)

Gets a role assignment by ID.

Parameters:

Name Type Description Default
role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
RoleAssignmentResponseModel

The role assignment.

Exceptions:

Type Description
KeyError

If the role assignment does not exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_role_assignment(
    self, role_assignment_id: UUID
) -> RoleAssignmentResponseModel:
    """Gets a role assignment by ID.

    Args:
        role_assignment_id: ID of the role assignment to get.

    Returns:
        The role assignment.

    Raises:
        KeyError: If the role assignment does not exist.
    """
    with Session(self.engine) as session:
        # The assignment may target a user or a team; check the user
        # table first to preserve that lookup precedence.
        for schema_class in (
            UserRoleAssignmentSchema,
            TeamRoleAssignmentSchema,
        ):
            assignment = session.exec(
                select(schema_class).where(
                    schema_class.id == role_assignment_id
                )
            ).one_or_none()
            if assignment is not None:
                return assignment.to_model()

        raise KeyError(
            f"RoleAssignment with ID {role_assignment_id} not found."
        )
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[str, uuid.UUID]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/sql_zen_store.py
def get_run(
    self, run_name_or_id: Union[str, UUID]
) -> PipelineRunResponseModel:
    """Gets a pipeline run.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.
    """
    # Outside the server, refresh the run records (`_sync_runs`) so the
    # lookup below sees up-to-date state.
    if not self.runs_inside_server:
        self._sync_runs()
    with Session(self.engine) as session:
        run_schema = self._get_run_schema(run_name_or_id, session=session)
        return run_schema.to_model()
get_run_step(self, step_id)

Get a step by ID.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to get.

required

Returns:

Type Description
StepRunResponseModel

The step.

Exceptions:

Type Description
KeyError

if the step doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
    """Get a step by ID.

    Args:
        step_id: The ID of the step to get.

    Returns:
        The step.

    Raises:
        KeyError: if the step doesn't exist.
    """
    # Outside the server, refresh run records before looking up the step.
    if not self.runs_inside_server:
        self._sync_runs()
    with Session(self.engine) as session:
        query = select(StepRunSchema).where(StepRunSchema.id == step_id)
        step_schema = session.exec(query).first()
        if step_schema is None:
            raise KeyError(
                f"Unable to get step with ID {step_id}: No step with this "
                "ID found."
            )
        return self._run_step_schema_to_model(step_schema)
get_run_step_inputs(self, step_id)

Get the inputs for a specific step.

Parameters:

Name Type Description Default
step_id UUID

The id of the step to get inputs for.

required

Returns:

Type Description
Dict[str, zenml.models.artifact_models.ArtifactResponseModel]

A dict mapping artifact names to the input artifacts for the step.

Exceptions:

Type Description
KeyError

if the step doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_run_step_inputs(
    self, step_id: UUID
) -> Dict[str, ArtifactResponseModel]:
    """Get the inputs for a specific step.

    Args:
        step_id: The id of the step to get inputs for.

    Returns:
        A dict mapping artifact names to the input artifacts for the step.

    Raises:
        KeyError: if the step doesn't exist.
    """
    with Session(self.engine) as session:
        # Verify the step exists before querying its input artifacts.
        step_query = select(StepRunSchema).where(StepRunSchema.id == step_id)
        if session.exec(step_query).first() is None:
            raise KeyError(
                f"Unable to get input artifacts for step with ID "
                f"{step_id}: No step with this ID found."
            )
        # Join the artifacts with the step->artifact link table to
        # recover the input name of each artifact.
        artifact_query = (
            select(ArtifactSchema, StepRunInputArtifactSchema)
            .where(
                ArtifactSchema.id == StepRunInputArtifactSchema.artifact_id
            )
            .where(StepRunInputArtifactSchema.step_id == step_id)
        )
        return {
            link.name: artifact_schema.to_model()
            for artifact_schema, link in session.exec(artifact_query).all()
        }
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Get a stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.

    Raises:
        KeyError: if the stack doesn't exist.
    """
    with Session(self.engine) as session:
        query = select(StackSchema).where(StackSchema.id == stack_id)
        stack_schema = session.exec(query).first()
        if stack_schema is None:
            raise KeyError(f"Stack with ID {stack_id} not found.")
        return stack_schema.to_model()
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
    """Get a stack component by ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
    with Session(self.engine) as session:
        query = select(StackComponentSchema).where(
            StackComponentSchema.id == component_id
        )
        component_schema = session.exec(query).first()
        if component_schema is None:
            raise KeyError(
                f"Stack component with ID {component_id} not found."
            )
        return component_schema.to_model()
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Source code in zenml/zen_stores/sql_zen_store.py
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.
    """
    info = super().get_store_info()
    # Derive the database type from the driver name of the SQL URL
    # (e.g. sqlite vs. mysql).
    info.database_type = ServerDatabaseType(
        make_url(self.config.url).drivername
    )
    return info
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Source code in zenml/zen_stores/sql_zen_store.py
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Gets a specific team.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.
    """
    with Session(self.engine) as session:
        team_schema = self._get_team_schema(team_name_or_id, session=session)
        return team_schema.to_model()
get_user(self, user_name_or_id)

Gets a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Source code in zenml/zen_stores/sql_zen_store.py
def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
    """Gets a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
    with Session(self.engine) as session:
        user_schema = self._get_user_schema(user_name_or_id, session=session)
        return user_schema.to_model()
list_artifacts(self, artifact_uri=None, parent_step_id=None)

Lists all artifacts.

Parameters:

Name Type Description Default
artifact_uri Optional[str]

If specified, only artifacts with the given URI will be returned.

None
parent_step_id Optional[uuid.UUID]

If specified, only artifacts for the given step run will be returned.

None

Returns:

Type Description
List[zenml.models.artifact_models.ArtifactResponseModel]

A list of all artifacts.

Source code in zenml/zen_stores/sql_zen_store.py
def list_artifacts(
    self,
    artifact_uri: Optional[str] = None,
    parent_step_id: Optional[UUID] = None,
) -> List[ArtifactResponseModel]:
    """Lists all artifacts.

    Args:
        artifact_uri: If specified, only artifacts with the given URI will
            be returned.
        parent_step_id: If specified, only artifacts for the given step run
            will be returned.

    Returns:
        A list of all artifacts.
    """
    # Outside the server, refresh run records before querying artifacts.
    if not self.runs_inside_server:
        self._sync_runs()
    with Session(self.engine) as session:
        # Apply the optional filters incrementally to the base query.
        query = select(ArtifactSchema)
        if artifact_uri is not None:
            query = query.where(ArtifactSchema.uri == artifact_uri)
        if parent_step_id is not None:
            query = query.where(
                ArtifactSchema.parent_step_id == parent_step_id
            )
        return [row.to_model() for row in session.exec(query).all()]
list_flavors(self, project_name_or_id=None, user_name_or_id=None, component_type=None, name=None, is_shared=None)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

Optionally filter by the Project to which the component flavors belong

None
component_type Optional[zenml.enums.StackComponentType]

Optionally filter by type of stack component

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter by the owner

None
name Optional[str]

Optionally filter flavors by name

None
is_shared Optional[bool]

Optionally filter out flavors by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.flavor_models.FlavorResponseModel]

List of all the stack component flavors matching the given criteria

Source code in zenml/zen_stores/sql_zen_store.py
def list_flavors(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_type: Optional[StackComponentType] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[FlavorResponseModel]:
    """List all stack component flavors matching the given filter criteria.

    Args:
        project_name_or_id: Optionally filter by the Project to which the
            component flavors belong
        user_name_or_id: Optionally filter by the owner
        component_type: Optionally filter by type of stack component
        name: Optionally filter flavors by name
        is_shared: Optionally filter out flavors by whether they are
            shared or not

    Returns:
        List of all the stack component flavors matching the given criteria
    """
    # NOTE(review): `is_shared` is accepted but never applied to the
    # query below, so shared-flavor filtering is silently ignored.
    # TODO: confirm whether FlavorSchema has an `is_shared` column and
    # add the corresponding `.where(...)` filter.
    with Session(self.engine) as session:
        query = select(FlavorSchema)
        if project_name_or_id:
            project = self._get_project_schema(
                project_name_or_id, session=session
            )
            query = query.where(FlavorSchema.project_id == project.id)
        if component_type:
            query = query.where(FlavorSchema.type == component_type)
        if name:
            query = query.where(FlavorSchema.name == name)
        if user_name_or_id:
            user = self._get_user_schema(user_name_or_id, session=session)
            query = query.where(FlavorSchema.user_id == user.id)

        list_of_flavors_in_db = session.exec(query).all()

        return [flavor.to_model() for flavor in list_of_flavors_in_db]
list_pipelines(self, project_name_or_id=None, user_name_or_id=None, name=None)

List all pipelines in the project.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines in this project.

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines from this user.

None
name Optional[str]

If provided, only list pipelines with this name.

None

Returns:

Type Description
List[zenml.models.pipeline_models.PipelineResponseModel]

A list of pipelines.

Source code in zenml/zen_stores/sql_zen_store.py
def list_pipelines(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    name: Optional[str] = None,
) -> List[PipelineResponseModel]:
    """List all pipelines in the project.

    Args:
        project_name_or_id: If provided, only list pipelines in this
            project.
        user_name_or_id: If provided, only list pipelines from this user.
        name: If provided, only list pipelines with this name.

    Returns:
        A list of pipelines.
    """
    with Session(self.engine) as session:
        query = select(PipelineSchema)

        # Narrow the query down for each filter that was supplied.
        if project_name_or_id is not None:
            project_schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            query = query.where(
                PipelineSchema.project_id == project_schema.id
            )
        if user_name_or_id is not None:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            query = query.where(PipelineSchema.user_id == user_schema.id)
        if name:
            query = query.where(PipelineSchema.name == name)

        # Fetch the matching rows and convert them to response models.
        return [
            pipeline_schema.to_model()
            for pipeline_schema in session.exec(query).all()
        ]
list_projects(self, name=None)

List all projects.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.project_models.ProjectResponseModel]

A list of all projects.

Source code in zenml/zen_stores/sql_zen_store.py
def list_projects(
    self, name: Optional[str] = None
) -> List[ProjectResponseModel]:
    """List all projects.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all projects.
    """
    with Session(self.engine) as session:
        query = select(ProjectSchema)
        if name:
            query = query.where(ProjectSchema.name == name)
        # Results are returned sorted alphabetically by project name.
        ordered_query = query.order_by(ProjectSchema.name)
        return [
            project_schema.to_model()
            for project_schema in session.exec(ordered_query).all()
        ]
list_role_assignments(self, project_name_or_id=None, role_name_or_id=None, team_name_or_id=None, user_name_or_id=None)

List all role assignments.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only return role assignments for this project.

None
role_name_or_id Union[str, uuid.UUID]

If provided, only list assignments of the given role

None
team_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for this team.

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for this user.

None

Returns:

Type Description
List[zenml.models.role_assignment_models.RoleAssignmentResponseModel]

A list of all role assignments.

Source code in zenml/zen_stores/sql_zen_store.py
def list_role_assignments(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    role_name_or_id: Optional[Union[str, UUID]] = None,
    team_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
) -> List[RoleAssignmentResponseModel]:
    """List all role assignments.

    Args:
        project_name_or_id: If provided, only return role assignments for
            this project.
        role_name_or_id: If provided, only list assignments of the given
            role
        team_name_or_id: If provided, only list assignments for this team.
        user_name_or_id: If provided, only list assignments for this user.

    Returns:
        A list of all role assignments.
    """
    # Role assignments are stored separately for users and for teams, so
    # collect both partial results and concatenate them.
    assignments: List[RoleAssignmentResponseModel] = []
    assignments.extend(
        self._list_user_role_assignments(
            project_name_or_id=project_name_or_id,
            user_name_or_id=user_name_or_id,
            role_name_or_id=role_name_or_id,
        )
    )
    assignments.extend(
        self._list_team_role_assignments(
            project_name_or_id=project_name_or_id,
            team_name_or_id=team_name_or_id,
            role_name_or_id=role_name_or_id,
        )
    )
    return assignments
list_roles(self, name=None)

List all roles.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.role_models.RoleResponseModel]

A list of all roles.

Source code in zenml/zen_stores/sql_zen_store.py
def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
    """List all roles.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all roles.
    """
    with Session(self.engine) as session:
        query = select(RoleSchema)
        if name:
            query = query.where(RoleSchema.name == name)
        # Results are returned sorted alphabetically by role name.
        ordered_query = query.order_by(RoleSchema.name)
        return [
            role_schema.to_model()
            for role_schema in session.exec(ordered_query).all()
        ]
list_run_steps(self, run_id=None)

Get all run steps.

Parameters:

Name Type Description Default
run_id Optional[uuid.UUID]

If provided, only return steps for this pipeline run.

None

Returns:

Type Description
List[zenml.models.step_run_models.StepRunResponseModel]

A list of all run steps.

Source code in zenml/zen_stores/sql_zen_store.py
def list_run_steps(
    self, run_id: Optional[UUID] = None
) -> List[StepRunResponseModel]:
    """Get all run steps.

    Args:
        run_id: If provided, only return steps for this pipeline run.

    Returns:
        A list of all run steps.
    """
    # Synchronize runs from the metadata store first, unless this store
    # is running inside a ZenML server.
    if not self.runs_inside_server:
        self._sync_runs()
    query = select(StepRunSchema)
    if run_id is not None:
        query = query.where(StepRunSchema.pipeline_run_id == run_id)
    with Session(self.engine) as session:
        return [
            self._run_step_schema_to_model(step_schema)
            for step_schema in session.exec(query).all()
        ]
list_runs(self, name=None, project_name_or_id=None, stack_id=None, component_id=None, user_name_or_id=None, pipeline_id=None, unlisted=False)

Gets all pipeline runs.

Parameters:

Name Type Description Default
name Optional[str]

Run name if provided

None
project_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this project.

None
stack_id Optional[uuid.UUID]

If provided, only return runs for this stack.

None
component_id Optional[uuid.UUID]

Optionally filter for runs that used the component

None
user_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this user.

None
pipeline_id Optional[uuid.UUID]

If provided, only return runs for this pipeline.

None
unlisted bool

If True, only return unlisted runs that are not associated with any pipeline (filter by pipeline_id==None).

False

Returns:

Type Description
List[zenml.models.pipeline_run_models.PipelineRunResponseModel]

A list of all pipeline runs.

Source code in zenml/zen_stores/sql_zen_store.py
def list_runs(
    self,
    name: Optional[str] = None,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    stack_id: Optional[UUID] = None,
    component_id: Optional[UUID] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    pipeline_id: Optional[UUID] = None,
    unlisted: bool = False,
) -> List[PipelineRunResponseModel]:
    """Gets all pipeline runs.

    Args:
        name: Run name if provided
        project_name_or_id: If provided, only return runs for this project.
        stack_id: If provided, only return runs for this stack.
        component_id: Optionally filter for runs that used the component
        user_name_or_id: If provided, only return runs for this user.
        pipeline_id: If provided, only return runs for this pipeline.
        unlisted: If True, only return unlisted runs that are not
            associated with any pipeline (filter by pipeline_id==None).

    Returns:
        A list of all pipeline runs.
    """
    # Synchronize runs from the metadata store first, unless this store
    # is running inside a ZenML server.
    if not self.runs_inside_server:
        self._sync_runs()
    with Session(self.engine) as session:
        query = select(PipelineRunSchema)

        # Narrow the query down for each filter that was supplied.
        if project_name_or_id is not None:
            project_schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            query = query.where(
                PipelineRunSchema.project_id == project_schema.id
            )
        if stack_id is not None:
            query = query.where(PipelineRunSchema.stack_id == stack_id)
        if component_id:
            # Restrict to runs whose stack contains the given component,
            # joining through the stack composition table.
            query = query.where(
                StackCompositionSchema.stack_id
                == PipelineRunSchema.stack_id
            ).where(StackCompositionSchema.component_id == component_id)
        if name is not None:
            query = query.where(PipelineRunSchema.name == name)
        if pipeline_id is not None:
            query = query.where(
                PipelineRunSchema.pipeline_id == pipeline_id
            )
        elif unlisted:
            # Unlisted runs are exactly those without a pipeline.
            query = query.where(is_(PipelineRunSchema.pipeline_id, None))
        if user_name_or_id is not None:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            query = query.where(
                PipelineRunSchema.user_id == user_schema.id
            )

        # Return the runs ordered by creation time.
        ordered_query = query.order_by(PipelineRunSchema.created)
        return [
            run_schema.to_model()
            for run_schema in session.exec(ordered_query).all()
        ]
list_stack_components(self, project_name_or_id=None, user_name_or_id=None, type=None, flavor_name=None, name=None, is_shared=None)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

The ID or name of the Project to which the stack components belong

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stack components by the owner

None
type Optional[str]

Optionally filter by type of stack component

None
flavor_name Optional[str]

Optionally filter by flavor

None
name Optional[str]

Optionally filter stack component by name

None
is_shared Optional[bool]

Optionally filter out stack components by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.component_models.ComponentResponseModel]

A list of all stack components matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_stack_components(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    type: Optional[str] = None,
    flavor_name: Optional[str] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[ComponentResponseModel]:
    """List all stack components matching the given filter criteria.

    Args:
        project_name_or_id: The ID or name of the Project to which the
            stack components belong
        user_name_or_id: Optionally filter stack components by the owner
        type: Optionally filter by type of stack component
        flavor_name: Optionally filter by flavor
        name: Optionally filter stack component by name
        is_shared: Optionally filter out stack component by whether they
            are shared or not

    Returns:
        A list of all stack components matching the filter criteria.
    """
    with Session(self.engine) as session:
        query = select(StackComponentSchema)

        # Narrow the query down for each filter that was supplied.
        if project_name_or_id:
            project_schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            query = query.where(
                StackComponentSchema.project_id == project_schema.id
            )
        if user_name_or_id:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            query = query.where(
                StackComponentSchema.user_id == user_schema.id
            )
        if type:
            query = query.where(StackComponentSchema.type == type)
        if flavor_name:
            query = query.where(
                StackComponentSchema.flavor == flavor_name
            )
        if name:
            query = query.where(StackComponentSchema.name == name)
        if is_shared is not None:
            query = query.where(
                StackComponentSchema.is_shared == is_shared
            )

        return [
            component_schema.to_model()
            for component_schema in session.exec(query).all()
        ]
list_stacks(self, project_name_or_id=None, user_name_or_id=None, component_id=None, name=None, is_shared=None)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

ID or name of the Project containing the stack

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stacks by their owner

None
component_id Optional[uuid.UUID]

Optionally filter for stacks that contain the component

None
name Optional[str]

Optionally filter stacks by their name

None
is_shared Optional[bool]

Optionally filter out stacks by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.stack_models.StackResponseModel]

A list of all stacks matching the filter criteria.

Source code in zenml/zen_stores/sql_zen_store.py
def list_stacks(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_id: Optional[UUID] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[StackResponseModel]:
    """List all stacks matching the given filter criteria.

    Args:
        project_name_or_id: ID or name of the Project containing the stack
        user_name_or_id: Optionally filter stacks by their owner
        component_id: Optionally filter for stacks that contain the
            component
        name: Optionally filter stacks by their name
        is_shared: Optionally filter out stacks by whether they are shared
            or not

    Returns:
        A list of all stacks matching the filter criteria.
    """
    with Session(self.engine) as session:
        query = select(StackSchema)

        # Narrow the query down for each filter that was supplied.
        if project_name_or_id:
            project_schema = self._get_project_schema(
                project_name_or_id, session=session
            )
            query = query.where(StackSchema.project_id == project_schema.id)
        if user_name_or_id:
            user_schema = self._get_user_schema(
                user_name_or_id, session=session
            )
            query = query.where(StackSchema.user_id == user_schema.id)
        if component_id:
            # Restrict to stacks that contain the given component,
            # joining through the stack composition table.
            query = query.where(
                StackCompositionSchema.stack_id == StackSchema.id
            ).where(StackCompositionSchema.component_id == component_id)
        if name:
            query = query.where(StackSchema.name == name)
        if is_shared is not None:
            query = query.where(StackSchema.is_shared == is_shared)

        # Results are returned sorted alphabetically by stack name.
        ordered_query = query.order_by(StackSchema.name)
        return [
            stack_schema.to_model()
            for stack_schema in session.exec(ordered_query).all()
        ]
list_teams(self, name=None)

List all teams.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.team_models.TeamResponseModel]

A list of all teams.

Source code in zenml/zen_stores/sql_zen_store.py
def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
    """List all teams.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all teams.
    """
    with Session(self.engine) as session:
        query = select(TeamSchema)
        if name:
            query = query.where(TeamSchema.name == name)
        # Results are returned sorted alphabetically by team name.
        ordered_query = query.order_by(TeamSchema.name)
        return [
            team_schema.to_model()
            for team_schema in session.exec(ordered_query).all()
        ]
list_users(self, name=None)

List all users.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.user_models.UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/sql_zen_store.py
def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
    """List all users.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all users.
    """
    with Session(self.engine) as session:
        query = select(UserSchema)
        if name:
            query = query.where(UserSchema.name == name)
        # Results are returned sorted alphabetically by user name.
        ordered_query = query.order_by(UserSchema.name)
        return [
            user_schema.to_model()
            for user_schema in session.exec(ordered_query).all()
        ]
migrate_database(self)

Migrate the database to the head as defined by the python package.

Source code in zenml/zen_stores/sql_zen_store.py
def migrate_database(self) -> None:
    """Migrate the database to the head as defined by the python package."""
    alembic_logger = logging.getLogger("alembic")

    # Detach any handlers alembic may already have attached so that only
    # the ZenML console handler is used.
    for handler in list(alembic_logger.handlers):
        alembic_logger.removeHandler(handler)

    # Alembic INFO output is only interesting when ZenML itself runs at
    # DEBUG level; otherwise keep it quiet.
    if get_logging_level() == LoggingLevels.DEBUG:
        alembic_logger.setLevel(logging.DEBUG)
    else:
        alembic_logger.setLevel(logging.WARNING)

    alembic_logger.addHandler(get_console_handler())

    # Three distinct situations must be handled:
    #   1. a completely empty (uninitialized) database
    #   2. a non-empty database that was never migrated with alembic
    #      (i.e. created with SQLModel back when alembic wasn't used)
    #   3. a non-empty database already under alembic control
    revisions = self.alembic.current_revisions()
    if revisions:
        if len(revisions) > 1:
            logger.warning(
                "The ZenML database has more than one migration head "
                "revision. This is not expected and might indicate a "
                "database migration problem. Please raise an issue on "
                "GitHub if you encounter this."
            )
        # Case 3: already migrated before; just upgrade to the latest
        # revision.
        self.alembic.upgrade()
    elif self.alembic.db_is_empty():
        # Case 1: empty database; create the tables from scratch with
        # alembic.
        self.alembic.upgrade()
    else:
        # Case 2: non-empty but never migrated with alembic. Create the
        # alembic version table, stamp it with the first revision where
        # alembic was introduced, then upgrade to the latest revision.
        self.alembic.stamp(ZENML_ALEMBIC_START_REVISION)
        self.alembic.upgrade()
update_pipeline(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_project(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_role(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
    with Session(self.engine) as session:
        # Fetch the run; fail loudly if no run with this ID exists.
        run_schema = session.exec(
            select(PipelineRunSchema).where(PipelineRunSchema.id == run_id)
        ).first()
        if run_schema is None:
            raise KeyError(
                f"Unable to update pipeline run with ID {run_id}: "
                f"No pipeline run with this ID found."
            )

        # Apply the update and persist it.
        run_schema.update(run_update=run_update)
        session.add(run_schema)
        session.commit()

        # Refresh so the returned model reflects the stored state.
        session.refresh(run_schema)
        return run_schema.to_model()
update_run_step(self, step_id, step_update)

Updates a step.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to update.

required
step_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step.

Exceptions:

Type Description
KeyError

if the step doesn't exist.

Source code in zenml/zen_stores/sql_zen_store.py
def update_run_step(
    self,
    step_id: UUID,
    step_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Updates a step.

    Args:
        step_id: The ID of the step to update.
        step_update: The update to be applied to the step.

    Returns:
        The updated step.

    Raises:
        KeyError: if the step doesn't exist.
    """
    with Session(self.engine) as session:
        # Fetch the step; fail loudly if no step with this ID exists.
        step_schema = session.exec(
            select(StepRunSchema).where(StepRunSchema.id == step_id)
        ).first()
        if step_schema is None:
            raise KeyError(
                f"Unable to update step with ID {step_id}: "
                f"No step with this ID found."
            )

        # Apply the update and persist it.
        step_schema.update(step_update)
        session.add(step_schema)
        session.commit()

        # Refresh so the returned model reflects the stored state.
        session.refresh(step_schema)

        return self._run_step_schema_to_model(step_schema)
update_stack(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_stack_component(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_team(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result
update_user(*args, **kwargs)

Inner decorator function.

Parameters:

Name Type Description Default
*args Any

Arguments to be passed to the function.

()
**kwargs Any

Keyword arguments to be passed to the function.

{}

Returns:

Type Description
Any

Result of the function.

Source code in zenml/zen_stores/sql_zen_store.py
def inner_func(*args: Any, **kwargs: Any) -> Any:
    """Inner decorator function.

    Calls the wrapped function and then records an analytics event for it.
    `func`, `event_name` and `metadata` are free variables closed over from
    the enclosing decorator scope (not visible in this snippet).

    Args:
        *args: Arguments to be passed to the function.
        **kwargs: Keyword arguments to be passed to the function.

    Returns:
        Result of the function.
    """
    result = func(*args, **kwargs)
    # Analytics must never break the wrapped call: any tracking error is
    # swallowed and only logged at debug level below.
    try:
        # If the wrapped callable is a method on an analytics tracker
        # (e.g. a zen store), route events through that instance.
        tracker: Optional[AnalyticsTrackerMixin] = None
        if len(args) and isinstance(args[0], AnalyticsTrackerMixin):
            tracker = args[0]
        # Prefer tracking via the first tracked model found among the
        # result and the call arguments; it carries its own metadata.
        for obj in [result] + list(args) + list(kwargs.values()):
            if isinstance(obj, AnalyticsTrackedModelMixin):
                obj.track_event(event_name, tracker=tracker)
                break
        else:
            # for/else: the loop finished without `break`, i.e. no tracked
            # model was involved -> emit a plain event with the static
            # metadata from the decorator.
            if tracker:
                tracker.track_event(event_name, metadata)
            else:
                track_event(event_name, metadata)

    except Exception as e:
        logger.debug(f"Analytics tracking failure for {func}: {e}")

    return result

SqlZenStoreConfiguration (StoreConfiguration) pydantic-model

SQL ZenML store configuration.

Attributes:

Name Type Description
type StoreType

The type of the store.

driver Optional[zenml.zen_stores.sql_zen_store.SQLDatabaseDriver]

The SQL database driver.

database Optional[str]

Database name. If not already present on the server, it will be created automatically on first access.

username Optional[str]

The database username.

password Optional[str]

The database password.

ssl_ca Optional[str]

certificate authority certificate. Required for SSL enabled authentication if the CA certificate is not part of the certificates shipped by the operating system.

ssl_cert Optional[str]

client certificate. Required for SSL enabled authentication if client certificates are used.

ssl_key Optional[str]

Client certificate private key. Required for SSL enabled authentication if client certificates are used.

ssl_verify_server_cert bool

set to verify the identity of the server against the provided server certificate.

pool_size int

The maximum number of connections to keep in the SQLAlchemy pool.

max_overflow int

The maximum number of connections to allow in the SQLAlchemy pool in addition to the pool_size.

grpc_metadata_host Optional[str]

The host to use for the gRPC metadata server.

grpc_metadata_port Optional[int]

The port to use for the gRPC metadata server.

grpc_metadata_ssl_ca Optional[str]

The certificate authority certificate to use for the gRPC metadata server connection.

grpc_metadata_ssl_cert Optional[str]

The client certificate to use for the gRPC metadata server connection.

grpc_metadata_ssl_key Optional[str]

The client certificate private key to use for the gRPC metadata server connection.

Source code in zenml/zen_stores/sql_zen_store.py
class SqlZenStoreConfiguration(StoreConfiguration):
    """SQL ZenML store configuration.

    Attributes:
        type: The type of the store.
        driver: The SQL database driver.
        database: database name. If not already present on the server, it will
            be created automatically on first access.
        username: The database username.
        password: The database password.
        ssl_ca: certificate authority certificate. Required for SSL
            enabled authentication if the CA certificate is not part of the
            certificates shipped by the operating system.
        ssl_cert: client certificate. Required for SSL enabled
            authentication if client certificates are used.
        ssl_key: client certificate private key. Required for SSL
            enabled if client certificates are used.
        ssl_verify_server_cert: set to verify the identity of the server
            against the provided server certificate.
        pool_size: The maximum number of connections to keep in the SQLAlchemy
            pool.
        max_overflow: The maximum number of connections to allow in the
            SQLAlchemy pool in addition to the pool_size.
        grpc_metadata_host: The host to use for the gRPC metadata server.
        grpc_metadata_port: The port to use for the gRPC metadata server.
        grpc_metadata_ssl_ca: The certificate authority certificate to use for
            the gRPC metadata server connection.
        grpc_metadata_ssl_cert: The client certificate to use for the gRPC
            metadata server connection.
        grpc_metadata_ssl_key: The client certificate private key to use for
            the gRPC metadata server connection.
    """

    type: StoreType = StoreType.SQL

    driver: Optional[SQLDatabaseDriver] = None
    database: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    ssl_ca: Optional[str] = None
    ssl_cert: Optional[str] = None
    ssl_key: Optional[str] = None
    ssl_verify_server_cert: bool = False
    pool_size: int = 20
    max_overflow: int = 20

    grpc_metadata_host: Optional[str] = None
    grpc_metadata_port: Optional[int] = None
    grpc_metadata_ssl_ca: Optional[str] = None
    grpc_metadata_ssl_key: Optional[str] = None
    grpc_metadata_ssl_cert: Optional[str] = None

    @root_validator
    def _validate_url(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Validate the SQL URL.

        The validator also moves the MySQL username, password and database
        parameters from the URL into the other configuration arguments, if they
        are present in the URL.

        Args:
            values: The values to validate.

        Returns:
            The validated values.

        Raises:
            ValueError: If the URL is invalid or the SQL driver is not
                supported.
        """
        # flake8: noqa: C901
        url = values.get("url")
        if url is None:
            return values

        # When running inside a container, if the URL uses localhost, the
        # target service will not be available. We try to replace localhost
        # with one of the special Docker or K3D internal hostnames.
        url = replace_localhost_with_internal_hostname(url)

        try:
            sql_url = make_url(url)
        except ArgumentError as e:
            # Bug fix: the previous code passed logging-style `%s`
            # placeholders plus extra positional args to ValueError, which
            # never interpolates them. Use an f-string and chain the cause.
            raise ValueError(
                f"Invalid SQL URL `{url}`: {e}. The URL must be in the "
                f"format `driver://[[username:password@]hostname:port]"
                f"/database[?<extra-args>]`."
            ) from e

        if sql_url.drivername not in SQLDatabaseDriver.values():
            raise ValueError(
                f"Invalid SQL driver value `{url}`: The driver must be one "
                f"of: {', '.join(SQLDatabaseDriver.values())}."
            )
        values["driver"] = SQLDatabaseDriver(sql_url.drivername)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            if (
                sql_url.username
                or sql_url.password
                or sql_url.query
                or sql_url.database is None
            ):
                raise ValueError(
                    f"Invalid SQLite URL `{url}`: The URL must be in the "
                    f"format `sqlite:///path/to/database.db`."
                )
            if values.get("username") or values.get("password"):
                raise ValueError(
                    "Invalid SQLite configuration: The username and password "
                    "must not be set"
                )
            values["database"] = sql_url.database
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # Move credentials and database name out of the URL and into
            # the dedicated configuration attributes.
            if sql_url.username:
                values["username"] = sql_url.username
                sql_url = sql_url._replace(username=None)
            if sql_url.password:
                values["password"] = sql_url.password
                sql_url = sql_url._replace(password=None)
            if sql_url.database:
                values["database"] = sql_url.database
                sql_url = sql_url._replace(database=None)
            if sql_url.query:
                # Only SSL-related query parameters are recognized; they are
                # likewise moved into the configuration attributes.
                for k, v in sql_url.query.items():
                    if k == "ssl_ca":
                        values["ssl_ca"] = v
                    elif k == "ssl_cert":
                        values["ssl_cert"] = v
                    elif k == "ssl_key":
                        values["ssl_key"] = v
                    elif k == "ssl_verify_server_cert":
                        values["ssl_verify_server_cert"] = v
                    else:
                        raise ValueError(
                            f"Invalid MySQL URL query parameter `{k}`: The "
                            f"parameter must be one of: ssl_ca, ssl_cert, "
                            f"ssl_key, or ssl_verify_server_cert."
                        )
                sql_url = sql_url._replace(query={})

            database = values.get("database")
            if (
                not values.get("username")
                or not values.get("password")
                or not database
            ):
                raise ValueError(
                    "Invalid MySQL configuration: The username, password and "
                    "database must be set in the URL or as configuration "
                    "attributes",
                )

            # MySQL identifier rules: at most 64 characters, none of the
            # listed special/path characters.
            regexp = r"^[^\\/?%*:|\"<>.-]{1,64}$"
            match = re.match(regexp, database)
            if not match:
                raise ValueError(
                    f"The database name does not conform to the required "
                    f"format "
                    f"rules ({regexp}): {database}"
                )

            # Save the certificates in a secure location on disk
            secret_folder = Path(
                GlobalConfiguration().local_stores_path,
                "certificates",
            )
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                content = values.get(key)
                if content and not os.path.isfile(content):
                    fileio.makedirs(str(secret_folder))
                    file_path = Path(secret_folder, f"{key}.pem")
                    with open(file_path, "w") as f:
                        f.write(content)
                    # Restrict access: the files may hold private key material.
                    file_path.chmod(0o600)
                    values[key] = str(file_path)

        values["url"] = str(sql_url)
        return values

    @staticmethod
    def get_local_url(path: str) -> str:
        """Get a local SQL url for a given local path.

        Args:
            path: The path to the local sqlite file.

        Returns:
            The local SQL url for the given path.
        """
        return f"sqlite:///{path}/{ZENML_SQLITE_DB_FILENAME}"

    @classmethod
    def supports_url_scheme(cls, url: str) -> bool:
        """Check if a URL scheme is supported by this store.

        Args:
            url: The URL to check.

        Returns:
            True if the URL scheme is supported, False otherwise.
        """
        return make_url(url).drivername in SQLDatabaseDriver.values()

    def expand_certificates(self) -> None:
        """Expands the certificate paths in the `ssl_ca`, `ssl_cert` and
        `ssl_key` fields to the contents of the certificate files.
        """
        # Load the certificate values back into the configuration
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            file_path = getattr(self, key, None)
            if file_path and os.path.isfile(file_path):
                with open(file_path, "r") as f:
                    setattr(self, key, f.read())

    @classmethod
    def copy_configuration(
        cls,
        config: "StoreConfiguration",
        config_path: str,
        load_config_path: Optional[PurePath] = None,
    ) -> "StoreConfiguration":
        """Copy the store config using a different configuration path.

        This method is used to create a copy of the store configuration that can
        be loaded using a different configuration path or in the context of a
        new environment, such as a container image.

        The configuration files accompanying the store configuration are also
        copied to the new configuration path (e.g. certificates etc.).

        Args:
            config: The store configuration to copy.
            config_path: new path where the configuration copy will be loaded
                from.
            load_config_path: absolute path that will be used to load the copied
                configuration. This can be set to a value different from
                `config_path` if the configuration copy will be loaded from
                a different environment, e.g. when the configuration is copied
                to a container image and loaded using a different absolute path.
                This will be reflected in the paths and URLs encoded in the
                copied configuration.

        Returns:
            A new store configuration object that reflects the new configuration
            path.
        """
        assert isinstance(config, SqlZenStoreConfiguration)
        config = config.copy()

        if config.driver == SQLDatabaseDriver.MYSQL:
            # Load the certificate values back into the configuration
            config.expand_certificates()

        elif config.driver == SQLDatabaseDriver.SQLITE:
            if load_config_path:
                config.url = cls.get_local_url(str(load_config_path))
            else:
                config.url = cls.get_local_url(config_path)

        return config

    def get_metadata_config(
        self, expand_certs: bool = False
    ) -> "ConnectionConfig":
        """Get the metadata configuration for the SQL ZenML store.

        Args:
            expand_certs: Whether to expand the certificate paths to their
                contents.

        Returns:
            The metadata configuration.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        # Imported lazily: ml_metadata and tfx are heavy dependencies that
        # are only needed when a metadata config is actually requested.
        from ml_metadata.proto.metadata_store_pb2 import MySQLDatabaseConfig
        from tfx.orchestration import metadata

        sql_url = make_url(self.url)
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            mlmd_config = metadata.sqlite_metadata_connection_config(
                self.database
            )
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            mlmd_config = metadata.mysql_metadata_connection_config(
                host=sql_url.host,
                port=sql_url.port or 3306,
                database=self.database,
                username=self.username,
                password=self.password,
            )

            mlmd_ssl_options = {}
            # Handle certificate params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if expand_certs and os.path.isfile(ssl_setting):
                    with open(ssl_setting, "r") as f:
                        ssl_setting = f.read()
                # Bug fix: use slicing to drop the "ssl_" prefix.
                # `str.lstrip("ssl_")` strips a *character set*, not a
                # prefix, and only worked here by accident.
                mlmd_ssl_options[key[4:]] = ssl_setting

            # Handle additional params
            if mlmd_ssl_options:
                mlmd_ssl_options[
                    "verify_server_cert"
                ] = self.ssl_verify_server_cert
                mlmd_config.mysql.ssl_options.CopyFrom(
                    MySQLDatabaseConfig.SSLOptions(**mlmd_ssl_options)
                )
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return mlmd_config

    def get_sqlmodel_config(self) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
        """Get the SQLModel engine configuration for the SQL ZenML store.

        Returns:
            The URL and connection arguments for the SQLModel engine.

        Raises:
            NotImplementedError: If the SQL driver is not supported.
        """
        sql_url = make_url(self.url)
        sqlalchemy_connect_args: Dict[str, Any] = {}
        engine_args: Dict[str, Any] = {}
        if sql_url.drivername == SQLDatabaseDriver.SQLITE:
            assert self.database is not None
            # The following default value is needed for sqlite to avoid the
            # Error:
            #   sqlite3.ProgrammingError: SQLite objects created in a thread can
            #   only be used in that same thread.
            sqlalchemy_connect_args = {"check_same_thread": False}
        elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
            # all these are guaranteed by our root validator
            assert self.database is not None
            assert self.username is not None
            assert self.password is not None
            assert sql_url.host is not None

            engine_args = {
                "pool_size": self.pool_size,
                "max_overflow": self.max_overflow,
            }

            sql_url = sql_url._replace(
                drivername="mysql+pymysql",
                username=self.username,
                password=self.password,
                database=self.database,
            )

            sqlalchemy_ssl_args: Dict[str, Any] = {}

            # Handle SSL params
            for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
                ssl_setting = getattr(self, key)
                if not ssl_setting:
                    continue
                if not os.path.isfile(ssl_setting):
                    logger.warning(
                        f"Database SSL setting `{key}` is not a file. "
                    )
                # Bug fix: use slicing to drop the "ssl_" prefix instead of
                # the character-set-based `str.lstrip`.
                sqlalchemy_ssl_args[key[4:]] = ssl_setting
            if len(sqlalchemy_ssl_args) > 0:
                sqlalchemy_ssl_args[
                    "check_hostname"
                ] = self.ssl_verify_server_cert
                sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
        else:
            raise NotImplementedError(
                f"SQL driver `{sql_url.drivername}` is not supported."
            )

        return str(sql_url), sqlalchemy_connect_args, engine_args

    class Config:
        """Pydantic configuration class."""

        # Don't validate attributes when assigning them. This is necessary
        # because the certificate attributes can be expanded to the contents
        # of the certificate files.
        validate_assignment = False
        # Forbid extra attributes set in the class.
        extra = "forbid"
Config

Pydantic configuration class.

Source code in zenml/zen_stores/sql_zen_store.py
class Config:
    """Pydantic configuration class.

    Controls how pydantic validates and populates the enclosing
    `SqlZenStoreConfiguration` model.
    """

    # Don't validate attributes when assigning them. This is necessary
    # because the certificate attributes can be expanded to the contents
    # of the certificate files.
    validate_assignment = False
    # Forbid extra attributes set in the class.
    extra = "forbid"
copy_configuration(config, config_path, load_config_path=None) classmethod

Copy the store config using a different configuration path.

This method is used to create a copy of the store configuration that can be loaded using a different configuration path or in the context of a new environment, such as a container image.

The configuration files accompanying the store configuration are also copied to the new configuration path (e.g. certificates etc.).

Parameters:

Name Type Description Default
config StoreConfiguration

The store configuration to copy.

required
config_path str

new path where the configuration copy will be loaded from.

required
load_config_path Optional[pathlib.PurePath]

absolute path that will be used to load the copied configuration. This can be set to a value different from config_path if the configuration copy will be loaded from a different environment, e.g. when the configuration is copied to a container image and loaded using a different absolute path. This will be reflected in the paths and URLs encoded in the copied configuration.

None

Returns:

Type Description
StoreConfiguration

A new store configuration object that reflects the new configuration path.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def copy_configuration(
    cls,
    config: "StoreConfiguration",
    config_path: str,
    load_config_path: Optional[PurePath] = None,
) -> "StoreConfiguration":
    """Create a copy of the store config bound to a new configuration path.

    The copy can be loaded from a different location or in a different
    environment (e.g. inside a container image). For MySQL configurations
    the certificate file contents are inlined so they travel with the
    copy; for SQLite configurations the database URL is rewritten to point
    at the new location.

    Args:
        config: The store configuration to copy.
        config_path: new path where the configuration copy will be loaded
            from.
        load_config_path: absolute path that will be used to load the
            copied configuration. May differ from `config_path` when the
            copy is loaded from another environment (e.g. a container
            image); it is reflected in the paths and URLs encoded in the
            copied configuration.

    Returns:
        A new store configuration object that reflects the new
        configuration path.
    """
    assert isinstance(config, SqlZenStoreConfiguration)
    copied = config.copy()

    if copied.driver == SQLDatabaseDriver.SQLITE:
        # Point the SQLite URL at the new configuration location.
        target = str(load_config_path) if load_config_path else config_path
        copied.url = cls.get_local_url(target)
    elif copied.driver == SQLDatabaseDriver.MYSQL:
        # Inline the certificate file contents into the configuration.
        copied.expand_certificates()

    return copied
expand_certificates(self)

Expands the certificate file paths stored in the `ssl_ca`, `ssl_cert` and `ssl_key` fields into the contents of those files.

Source code in zenml/zen_stores/sql_zen_store.py
def expand_certificates(self) -> None:
    """Replace the `ssl_key`, `ssl_ca` and `ssl_cert` file paths with the
    contents of the files they point to.

    Attributes that are unset, or whose value is not an existing file
    path, are left untouched.
    """
    for attr_name in ("ssl_key", "ssl_ca", "ssl_cert"):
        candidate = getattr(self, attr_name, None)
        if not candidate or not os.path.isfile(candidate):
            continue
        with open(candidate, "r") as cert_file:
            setattr(self, attr_name, cert_file.read())
get_local_url(path) staticmethod

Get a local SQL url for a given local path.

Parameters:

Name Type Description Default
path str

The path to the local sqlite file.

required

Returns:

Type Description
str

The local SQL url for the given path.

Source code in zenml/zen_stores/sql_zen_store.py
@staticmethod
def get_local_url(path: str) -> str:
    """Build the SQLite URL for a local database directory.

    Args:
        path: Directory that contains (or will contain) the local sqlite
            database file.

    Returns:
        A `sqlite:///...` URL pointing at the ZenML database in `path`.
    """
    return "sqlite:///" + path + "/" + ZENML_SQLITE_DB_FILENAME
get_metadata_config(self, expand_certs=False)

Get the metadata configuration for the SQL ZenML store.

Parameters:

Name Type Description Default
expand_certs bool

Whether to expand the certificate paths to their contents.

False

Returns:

Type Description
ConnectionConfig

The metadata configuration.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_metadata_config(
    self, expand_certs: bool = False
) -> "ConnectionConfig":
    """Get the metadata configuration for the SQL ZenML store.

    Args:
        expand_certs: Whether to expand the certificate paths to their
            contents.

    Returns:
        The metadata configuration.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    # Imported lazily: ml_metadata and tfx are heavy dependencies only
    # needed when a metadata config is actually requested.
    from ml_metadata.proto.metadata_store_pb2 import MySQLDatabaseConfig
    from tfx.orchestration import metadata

    sql_url = make_url(self.url)
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        # For SQLite, `database` holds the path to the database file.
        assert self.database is not None
        mlmd_config = metadata.sqlite_metadata_connection_config(
            self.database
        )
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        mlmd_config = metadata.mysql_metadata_connection_config(
            host=sql_url.host,
            port=sql_url.port or 3306,  # fall back to the MySQL default port
            database=self.database,
            username=self.username,
            password=self.password,
        )

        mlmd_ssl_options = {}
        # Handle certificate params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if expand_certs and os.path.isfile(ssl_setting):
                # Replace the certificate file path with the file contents.
                with open(ssl_setting, "r") as f:
                    ssl_setting = f.read()
            # NOTE(review): `str.lstrip("ssl_")` strips a *character set*,
            # not a prefix. It yields "key"/"ca"/"cert" here only because
            # none of those remainders start with "s", "l" or "_";
            # `key[4:]` would be the robust way to drop the "ssl_" prefix.
            mlmd_ssl_options[key.lstrip("ssl_")] = ssl_setting

        # Handle additional params
        if mlmd_ssl_options:
            mlmd_ssl_options[
                "verify_server_cert"
            ] = self.ssl_verify_server_cert
            mlmd_config.mysql.ssl_options.CopyFrom(
                MySQLDatabaseConfig.SSLOptions(**mlmd_ssl_options)
            )
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return mlmd_config
get_sqlmodel_config(self)

Get the SQLModel engine configuration for the SQL ZenML store.

Returns:

Type Description
Tuple[str, Dict[str, Any], Dict[str, Any]]

The URL and connection arguments for the SQLModel engine.

Exceptions:

Type Description
NotImplementedError

If the SQL driver is not supported.

Source code in zenml/zen_stores/sql_zen_store.py
def get_sqlmodel_config(self) -> Tuple[str, Dict[str, Any], Dict[str, Any]]:
    """Get the SQLModel engine configuration for the SQL ZenML store.

    Returns:
        The URL and connection arguments for the SQLModel engine.

    Raises:
        NotImplementedError: If the SQL driver is not supported.
    """
    sql_url = make_url(self.url)
    sqlalchemy_connect_args: Dict[str, Any] = {}
    engine_args = {}
    if sql_url.drivername == SQLDatabaseDriver.SQLITE:
        assert self.database is not None
        # The following default value is needed for sqlite to avoid the
        # Error:
        #   sqlite3.ProgrammingError: SQLite objects created in a thread can
        #   only be used in that same thread.
        sqlalchemy_connect_args = {"check_same_thread": False}
    elif sql_url.drivername == SQLDatabaseDriver.MYSQL:
        # all these are guaranteed by our root validator
        assert self.database is not None
        assert self.username is not None
        assert self.password is not None
        assert sql_url.host is not None

        # Pool sizing is only relevant for server-based databases.
        engine_args = {
            "pool_size": self.pool_size,
            "max_overflow": self.max_overflow,
        }

        # Rebuild the URL with the pymysql driver and the credentials that
        # the root validator moved out of the URL.
        sql_url = sql_url._replace(
            drivername="mysql+pymysql",
            username=self.username,
            password=self.password,
            database=self.database,
        )

        sqlalchemy_ssl_args: Dict[str, Any] = {}

        # Handle SSL params
        for key in ["ssl_key", "ssl_ca", "ssl_cert"]:
            ssl_setting = getattr(self, key)
            if not ssl_setting:
                continue
            if not os.path.isfile(ssl_setting):
                # NOTE(review): this only warns; the non-file value is
                # still passed through to SQLAlchemy below.
                logger.warning(
                    f"Database SSL setting `{key}` is not a file. "
                )
            # NOTE(review): `str.lstrip("ssl_")` strips a *character set*,
            # not a prefix; it works here only by accident. `key[4:]` would
            # be the robust way to drop the "ssl_" prefix.
            sqlalchemy_ssl_args[key.lstrip("ssl_")] = ssl_setting
        if len(sqlalchemy_ssl_args) > 0:
            sqlalchemy_ssl_args[
                "check_hostname"
            ] = self.ssl_verify_server_cert
            sqlalchemy_connect_args["ssl"] = sqlalchemy_ssl_args
    else:
        raise NotImplementedError(
            f"SQL driver `{sql_url.drivername}` is not supported."
        )

    return str(sql_url), sqlalchemy_connect_args, engine_args
supports_url_scheme(url) classmethod

Check if a URL scheme is supported by this store.

Parameters:

Name Type Description Default
url str

The URL to check.

required

Returns:

Type Description
bool

True if the URL scheme is supported, False otherwise.

Source code in zenml/zen_stores/sql_zen_store.py
@classmethod
def supports_url_scheme(cls, url: str) -> bool:
    """Determine whether this store type can handle the given URL.

    Args:
        url: The URL whose scheme should be checked.

    Returns:
        True if the URL's driver name is one of the supported SQL
        drivers, False otherwise.
    """
    driver = make_url(url).drivername
    return driver in SQLDatabaseDriver.values()

zen_store_interface

ZenML Store interface.

ZenStoreInterface (ABC)

ZenML store interface.

All ZenML stores must implement the methods in this interface.

The methods in this interface are organized in the following way:

  • they are grouped into categories based on the type of resource that they operate on (e.g. stacks, stack components, etc.)

  • each category has a set of CRUD methods (create, read, update, delete) that operate on the resources in that category. The order of the methods in each category should be:

  • create methods - store a new resource. These methods should fill in generated fields (e.g. UUIDs, creation timestamps) in the resource and return the updated resource.

  • get methods - retrieve a single existing resource identified by a unique key or identifier from the store. These methods should always return a resource and raise an exception if the resource does not exist.
  • list methods - retrieve a list of resources from the store. These methods should accept a set of filter parameters that can be used to filter the list of resources retrieved from the store.
  • update methods - update an existing resource in the store. These methods should expect the updated resource to be correctly identified by its unique key or identifier and raise an exception if the resource does not exist.
  • delete methods - delete an existing resource from the store. These methods should expect the resource to be correctly identified by its unique key or identifier. If the resource does not exist, an exception should be raised.

Best practices for implementing and keeping this interface clean and easy to maintain and extend:

  • keep methods organized by resource type and ordered by CRUD operation
  • for resources with multiple keys, don't implement multiple get or list methods here if the same functionality can be achieved by a single get or list method. Instead, implement them in the BaseZenStore class and have them call the generic get or list method in this interface.
  • keep the logic required to convert between ZenML domain Model classes and internal store representations outside the ZenML domain Model classes
  • methods for resources that have two or more unique keys (e.g. a Project is uniquely identified by its name as well as its UUID) should reflect that in the method variants and/or method arguments:
    • methods that take in a resource identifier as argument should accept all variants of the identifier (e.g. project_name_or_uuid for methods that get/list/update/delete Projects)
    • if a compound key is involved, separate get methods should be implemented (e.g. get_pipeline to get a pipeline by ID and get_pipeline_in_project to get a pipeline by its name and the ID of the project it belongs to)
  • methods for resources that are scoped as children of other resources (e.g. a Stack is always owned by a Project) should reflect the key(s) of the parent resource in the provided methods and method arguments:
    • create methods should take the parent resource UUID(s) as an argument (e.g. create_stack takes in the project ID)
    • get methods should be provided to retrieve a resource by the compound key that includes the parent resource key(s)
    • list methods should feature optional filter arguments that reflect the parent resource key(s)
Source code in zenml/zen_stores/zen_store_interface.py
class ZenStoreInterface(ABC):
    """ZenML store interface.

    All ZenML stores must implement the methods in this interface.

    The methods in this interface are organized in the following way:

     * they are grouped into categories based on the type of resource
       that they operate on (e.g. stacks, stack components, etc.)

     * each category has a set of CRUD methods (create, read, update, delete)
       that operate on the resources in that category. The order of the methods
       in each category should be:

       * create methods - store a new resource. These methods
         should fill in generated fields (e.g. UUIDs, creation timestamps) in
         the resource and return the updated resource.
       * get methods - retrieve a single existing resource identified by a
         unique key or identifier from the store. These methods should always
         return a resource and raise an exception if the resource does not
         exist.
       * list methods - retrieve a list of resources from the store. These
         methods should accept a set of filter parameters that can be used to
         filter the list of resources retrieved from the store.
       * update methods - update an existing resource in the store. These
         methods should expect the updated resource to be correctly identified
         by its unique key or identifier and raise an exception if the resource
         does not exist.
       * delete methods - delete an existing resource from the store. These
         methods should expect the resource to be correctly identified by its
         unique key or identifier. If the resource does not exist,
         an exception should be raised.

    Best practices for implementing and keeping this interface clean and easy to
    maintain and extend:

      * keep methods organized by resource type and ordered by CRUD operation
      * for resources with multiple keys, don't implement multiple get or list
      methods here if the same functionality can be achieved by a single get or
      list method. Instead, implement them in the BaseZenStore class and have
      them call the generic get or list method in this interface.
      * keep the logic required to convert between ZenML domain Model classes
      and internal store representations outside the ZenML domain Model classes
      * methods for resources that have two or more unique keys (e.g. a Project
      is uniquely identified by its name as well as its UUID) should reflect
      that in the method variants and/or method arguments:
        * methods that take in a resource identifier as argument should accept
        all variants of the identifier (e.g. `project_name_or_id` for methods
        that get/list/update/delete Projects)
        * if a compound key is involved, separate get methods should be
        implemented (e.g. `get_pipeline` to get a pipeline by ID and
        `get_pipeline_in_project` to get a pipeline by its name and the ID of
        the project it belongs to)
      * methods for resources that are scoped as children of other resources
      (e.g. a Stack is always owned by a Project) should reflect the
      key(s) of the parent resource in the provided methods and method
      arguments:
        * create methods should take the parent resource UUID(s) as an argument
        (e.g. `create_stack` takes in the project ID)
        * get methods should be provided to retrieve a resource by the compound
        key that includes the parent resource key(s)
        * list methods should feature optional filter arguments that reflect
        the parent resource key(s)
    """

    # ---------------------------------
    # Initialization and configuration
    # ---------------------------------

    @abstractmethod
    def _initialize(self) -> None:
        """Initialize the store.

        This method is called immediately after the store is created. It should
        be used to set up the backend (database, connection etc.).
        """

    @abstractmethod
    def get_store_info(self) -> ServerModel:
        """Get information about the store.

        Returns:
            Information about the store.
        """

    # ------------
    # TFX Metadata
    # ------------

    @abstractmethod
    def get_metadata_config(
        self, expand_certs: bool = False
    ) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
        """Get the TFX metadata config of this ZenStore.

        Args:
            expand_certs: Whether to expand the certificate paths in the
                connection config to their value.

        Returns:
            The TFX metadata config of this ZenStore.
        """

    # ------
    # Stacks
    # ------

    @abstractmethod
    def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
        """Create a new stack.

        Args:
            stack: The stack to create.

        Returns:
            The created stack.

        Raises:
            StackExistsError: If a stack with the same name is already owned
                by this user in this project.
        """

    @abstractmethod
    def get_stack(self, stack_id: UUID) -> StackResponseModel:
        """Get a stack by its unique ID.

        Args:
            stack_id: The ID of the stack to get.

        Returns:
            The stack with the given ID.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    @abstractmethod
    def list_stacks(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_id: Optional[UUID] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[StackResponseModel]:
        """List all stacks matching the given filter criteria.

        Args:
            project_name_or_id: ID or name of the Project containing the stack
            user_name_or_id: Optionally filter stacks by their owner
            component_id: Optionally filter for stacks that contain the
                          component
            name: Optionally filter stacks by their name
            is_shared: Optionally filter out stacks by whether they are shared
                or not

        Returns:
            A list of all stacks matching the filter criteria.

        Raises:
            KeyError: if the project doesn't exist.
        """

    @abstractmethod
    def update_stack(
        self, stack_id: UUID, stack_update: StackUpdateModel
    ) -> StackResponseModel:
        """Update a stack.

        Args:
            stack_id: The ID of the stack to update.
            stack_update: The update request on the stack.

        Returns:
            The updated stack.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    @abstractmethod
    def delete_stack(self, stack_id: UUID) -> None:
        """Delete a stack.

        Args:
            stack_id: The ID of the stack to delete.

        Raises:
            KeyError: if the stack doesn't exist.
        """

    # ----------------
    # Stack components
    # ----------------

    @abstractmethod
    def create_stack_component(
        self, component: ComponentRequestModel
    ) -> ComponentResponseModel:
        """Create a stack component.

        Args:
            component: The stack component to create.

        Returns:
            The created stack component.

        Raises:
            StackComponentExistsError: If a stack component with the same name
                and type is already owned by this user in this project.
        """

    @abstractmethod
    def list_stack_components(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        type: Optional[str] = None,  # NOTE: shadows the builtin `type`; kept for API compatibility
        flavor_name: Optional[str] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[ComponentResponseModel]:
        """List all stack components matching the given filter criteria.

        Args:
            project_name_or_id: The ID or name of the Project to which the stack
                components belong
            user_name_or_id: Optionally filter stack components by the owner
            type: Optionally filter by type of stack component
            flavor_name: Optionally filter by flavor
            name: Optionally filter stack component by name
            is_shared: Optionally filter out stack component by whether they are
                shared or not

        Returns:
            A list of all stack components matching the filter criteria.

        Raises:
            KeyError: if the project doesn't exist.
        """

    @abstractmethod
    def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
        """Get a stack component by ID.

        Args:
            component_id: The ID of the stack component to get.

        Returns:
            The stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """

    @abstractmethod
    def update_stack_component(
        self,
        component_id: UUID,
        component_update: ComponentUpdateModel,
    ) -> ComponentResponseModel:
        """Update an existing stack component.

        Args:
            component_id: The ID of the stack component to update.
            component_update: The update to be applied to the stack component.

        Returns:
            The updated stack component.

        Raises:
            KeyError: if the stack component doesn't exist.
        """

    @abstractmethod
    def delete_stack_component(self, component_id: UUID) -> None:
        """Delete a stack component.

        Args:
            component_id: The ID of the stack component to delete.

        Raises:
            KeyError: if the stack component doesn't exist.
            ValueError: if the stack component is part of one or more stacks.
        """

    # -----------------------
    # Stack component flavors
    # -----------------------

    @abstractmethod
    def create_flavor(
        self,
        flavor: FlavorRequestModel,
    ) -> FlavorResponseModel:
        """Creates a new stack component flavor.

        Args:
            flavor: The stack component flavor to create.

        Returns:
            The newly created flavor.

        Raises:
            EntityExistsError: If a flavor with the same name and type
                is already owned by this user in this project.
        """

    @abstractmethod
    def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
        """Get a stack component flavor by ID.

        Args:
            flavor_id: The ID of the flavor to get.

        Returns:
            The stack component flavor.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """

    @abstractmethod
    def list_flavors(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        component_type: Optional[StackComponentType] = None,
        name: Optional[str] = None,
        is_shared: Optional[bool] = None,
    ) -> List[FlavorResponseModel]:
        """List all stack component flavors matching the given filter criteria.

        Args:
            project_name_or_id: Optionally filter by the Project to which the
                component flavors belong
            user_name_or_id: Optionally filter by the owner
            component_type: Optionally filter by type of stack component
            name: Optionally filter flavors by name
            is_shared: Optionally filter out flavors by whether they are
                shared or not

        Returns:
            List of all the stack component flavors matching the given criteria.

        Raises:
            KeyError: if the project doesn't exist.
        """

    @abstractmethod
    def delete_flavor(self, flavor_id: UUID) -> None:
        """Delete a stack component flavor.

        Args:
            flavor_id: The ID of the stack component flavor to delete.

        Raises:
            KeyError: if the stack component flavor doesn't exist.
        """

    # -----
    # Users
    # -----
    @property
    @abstractmethod
    def active_user_name(self) -> str:
        """Gets the active username.

        Returns:
            The active username.
        """

    @abstractmethod
    def create_user(self, user: UserRequestModel) -> UserResponseModel:
        """Creates a new user.

        Args:
            user: User to be created.

        Returns:
            The newly created user.

        Raises:
            EntityExistsError: If a user with the given name already exists.
        """

    @abstractmethod
    def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
        """Gets a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.

        Raises:
            KeyError: If no user with the given name or ID exists.
        """

    @abstractmethod
    def get_auth_user(self, user_name_or_id: Union[str, UUID]) -> UserAuthModel:
        """Gets the auth model to a specific user.

        Args:
            user_name_or_id: The name or ID of the user to get.

        Returns:
            The requested user, if it was found.

        Raises:
            KeyError: If no user with the given name or ID exists.
        """

    @abstractmethod
    def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
        """List all users.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all users.
        """

    @abstractmethod
    def update_user(
        self, user_id: UUID, user_update: UserUpdateModel
    ) -> UserResponseModel:
        """Updates an existing user.

        Args:
            user_id: The id of the user to update.
            user_update: The update to be applied to the user.

        Returns:
            The updated user.

        Raises:
            KeyError: If no user with the given name exists.
        """

    @abstractmethod
    def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
        """Deletes a user.

        Args:
            user_name_or_id: The name or ID of the user to delete.

        Raises:
            KeyError: If no user with the given ID exists.
        """

    # -----
    # Teams
    # -----

    @abstractmethod
    def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
        """Creates a new team.

        Args:
            team: The team model to create.

        Returns:
            The newly created team.
        """

    @abstractmethod
    def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
        """Gets a specific team.

        Args:
            team_name_or_id: Name or ID of the team to get.

        Returns:
            The requested team.

        Raises:
            KeyError: If no team with the given name or ID exists.
        """

    @abstractmethod
    def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
        """List all teams.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all teams.
        """

    @abstractmethod
    def update_team(
        self, team_id: UUID, team_update: TeamUpdateModel
    ) -> TeamResponseModel:
        """Update an existing team.

        Args:
            team_id: The ID of the team to be updated.
            team_update: The update to be applied to the team.

        Returns:
            The updated team.

        Raises:
            KeyError: if the team does not exist.
        """

    @abstractmethod
    def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
        """Deletes a team.

        Args:
            team_name_or_id: Name or ID of the team to delete.

        Raises:
            KeyError: If no team with the given ID exists.
        """

    # -----
    # Roles
    # -----

    @abstractmethod
    def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
        """Creates a new role.

        Args:
            role: The role model to create.

        Returns:
            The newly created role.

        Raises:
            EntityExistsError: If a role with the given name already exists.
        """

    @abstractmethod
    def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
        """Gets a specific role.

        Args:
            role_name_or_id: Name or ID of the role to get.

        Returns:
            The requested role.

        Raises:
            KeyError: If no role with the given name exists.
        """

    @abstractmethod
    def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
        """List all roles.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all roles.
        """

    @abstractmethod
    def update_role(
        self, role_id: UUID, role_update: RoleUpdateModel
    ) -> RoleResponseModel:
        """Update an existing role.

        Args:
            role_id: The ID of the role to be updated.
            role_update: The update to be applied to the role.

        Returns:
            The updated role.

        Raises:
            KeyError: if the role does not exist.
        """

    @abstractmethod
    def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
        """Deletes a role.

        Args:
            role_name_or_id: Name or ID of the role to delete.

        Raises:
            KeyError: If no role with the given ID exists.
        """

    # ----------------
    # Role assignments
    # ----------------
    @abstractmethod
    def create_role_assignment(
        self, role_assignment: RoleAssignmentRequestModel
    ) -> RoleAssignmentResponseModel:
        """Creates a new role assignment.

        Args:
            role_assignment: The role assignment model to create.

        Returns:
            The newly created role assignment.
        """

    @abstractmethod
    def get_role_assignment(
        self, role_assignment_id: UUID
    ) -> RoleAssignmentResponseModel:
        """Gets a specific role assignment.

        Args:
            role_assignment_id: ID of the role assignment to get.

        Returns:
            The requested role assignment.

        Raises:
            KeyError: If no role assignment with the given ID exists.
        """

    @abstractmethod
    def delete_role_assignment(self, role_assignment_id: UUID) -> None:
        """Delete a specific role assignment.

        Args:
            role_assignment_id: The ID of the role assignment to delete.

        Raises:
            KeyError: If no role assignment with the given ID exists.
        """

    @abstractmethod
    def list_role_assignments(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        role_name_or_id: Optional[Union[str, UUID]] = None,
        team_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
    ) -> List[RoleAssignmentResponseModel]:
        """List all role assignments.

        Args:
            project_name_or_id: If provided, only list assignments for the given
                project
            role_name_or_id: If provided, only list assignments of the given
                role
            team_name_or_id: If provided, only list assignments for the given
                team
            user_name_or_id: If provided, only list assignments for the given
                user

        Returns:
            A list of all role assignments.
        """

    # --------
    # Projects
    # --------

    @abstractmethod
    def create_project(
        self, project: ProjectRequestModel
    ) -> ProjectResponseModel:
        """Creates a new project.

        Args:
            project: The project to create.

        Returns:
            The newly created project.

        Raises:
            EntityExistsError: If a project with the given name already exists.
        """

    @abstractmethod
    def get_project(
        self, project_name_or_id: Union[UUID, str]
    ) -> ProjectResponseModel:
        """Get an existing project by name or ID.

        Args:
            project_name_or_id: Name or ID of the project to get.

        Returns:
            The requested project.

        Raises:
            KeyError: If there is no such project.
        """

    @abstractmethod
    def list_projects(
        self, name: Optional[str] = None
    ) -> List[ProjectResponseModel]:
        """List all projects.

        Args:
            name: Optionally filter by name

        Returns:
            A list of all projects.
        """

    @abstractmethod
    def update_project(
        self, project_id: UUID, project_update: ProjectUpdateModel
    ) -> ProjectResponseModel:
        """Update an existing project.

        Args:
            project_id: The ID of the project to be updated.
            project_update: The update to be applied to the project.

        Returns:
            The updated project.

        Raises:
            KeyError: if the project does not exist.
        """

    @abstractmethod
    def delete_project(self, project_name_or_id: Union[str, UUID]) -> None:
        """Deletes a project.

        Args:
            project_name_or_id: Name or ID of the project to delete.

        Raises:
            KeyError: If no project with the given name exists.
        """

    # ---------
    # Pipelines
    # ---------
    @abstractmethod
    def create_pipeline(
        self,
        pipeline: PipelineRequestModel,
    ) -> PipelineResponseModel:
        """Creates a new pipeline in a project.

        Args:
            pipeline: The pipeline to create.

        Returns:
            The newly created pipeline.

        Raises:
            KeyError: if the project does not exist.
            EntityExistsError: If an identical pipeline already exists.
        """

    @abstractmethod
    def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
        """Get a pipeline with a given ID.

        Args:
            pipeline_id: ID of the pipeline.

        Returns:
            The pipeline.

        Raises:
            KeyError: if the pipeline does not exist.
        """

    @abstractmethod
    def list_pipelines(
        self,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        name: Optional[str] = None,
    ) -> List[PipelineResponseModel]:
        """List all pipelines in the project.

        Args:
            project_name_or_id: If provided, only list pipelines in this
                project.
            user_name_or_id: If provided, only list pipelines from this user.
            name: If provided, only list pipelines with this name.

        Returns:
            A list of pipelines.

        Raises:
            KeyError: if the project does not exist.
        """

    @abstractmethod
    def update_pipeline(
        self,
        pipeline_id: UUID,
        pipeline_update: PipelineUpdateModel,
    ) -> PipelineResponseModel:
        """Updates a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to be updated.
            pipeline_update: The update to be applied.

        Returns:
            The updated pipeline.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """

    @abstractmethod
    def delete_pipeline(self, pipeline_id: UUID) -> None:
        """Deletes a pipeline.

        Args:
            pipeline_id: The ID of the pipeline to delete.

        Raises:
            KeyError: if the pipeline doesn't exist.
        """

    # --------------
    # Pipeline runs
    # --------------

    @abstractmethod
    def create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Creates a pipeline run.

        Args:
            pipeline_run: The pipeline run to create.

        Returns:
            The created pipeline run.

        Raises:
            EntityExistsError: If an identical pipeline run already exists.
            KeyError: If the pipeline does not exist.
        """

    @abstractmethod
    def get_run(
        self, run_name_or_id: Union[str, UUID]
    ) -> PipelineRunResponseModel:
        """Gets a pipeline run.

        Args:
            run_name_or_id: The name or ID of the pipeline run to get.

        Returns:
            The pipeline run.

        Raises:
            KeyError: if the pipeline run doesn't exist.
        """

    @abstractmethod
    def get_or_create_run(
        self, pipeline_run: PipelineRunRequestModel
    ) -> PipelineRunResponseModel:
        """Gets or creates a pipeline run.

        If a run with the same ID or name already exists, it is returned.
        Otherwise, a new run is created.

        Args:
            pipeline_run: The pipeline run to get or create.

        Returns:
            The pipeline run.
        """

    @abstractmethod
    def list_runs(
        self,
        name: Optional[str] = None,
        project_name_or_id: Optional[Union[str, UUID]] = None,
        stack_id: Optional[UUID] = None,
        component_id: Optional[UUID] = None,
        user_name_or_id: Optional[Union[str, UUID]] = None,
        pipeline_id: Optional[UUID] = None,
        unlisted: bool = False,
    ) -> List[PipelineRunResponseModel]:
        """Gets all pipeline runs.

        Args:
            name: If provided, only return runs with this name.
            project_name_or_id: If provided, only return runs for this project.
            stack_id: If provided, only return runs for this stack.
            component_id: Optionally filter for runs that used the
                          component
            user_name_or_id: If provided, only return runs for this user.
            pipeline_id: If provided, only return runs for this pipeline.
            unlisted: If True, only return unlisted runs that are not
                associated with any pipeline (filter by `pipeline_id==None`).

        Returns:
            A list of all pipeline runs.
        """

    @abstractmethod
    def update_run(
        self, run_id: UUID, run_update: PipelineRunUpdateModel
    ) -> PipelineRunResponseModel:
        """Updates a pipeline run.

        Args:
            run_id: The ID of the pipeline run to update.
            run_update: The update to be applied to the pipeline run.

        Returns:
            The updated pipeline run.

        Raises:
            KeyError: if the pipeline run doesn't exist.
        """

    # ------------------
    # Pipeline run steps
    # ------------------

    @abstractmethod
    def create_run_step(
        self, step: StepRunRequestModel
    ) -> StepRunResponseModel:
        """Creates a step.

        Args:
            step: The step to create.

        Returns:
            The created step.

        Raises:
            EntityExistsError: if the step already exists.
            KeyError: if the pipeline run doesn't exist.
        """

    @abstractmethod
    def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
        """Get a step by ID.

        Args:
            step_id: The ID of the step to get.

        Returns:
            The step.

        Raises:
            KeyError: if the step doesn't exist.
        """

    @abstractmethod
    def get_run_step_inputs(
        self, step_id: UUID
    ) -> Dict[str, ArtifactResponseModel]:
        """Get a list of inputs for a specific step.

        Args:
            step_id: The id of the step to get inputs for.

        Returns:
            A dict mapping artifact names to the input artifacts for the step.
        """

    @abstractmethod
    def list_run_steps(
        self, run_id: Optional[UUID] = None
    ) -> List[StepRunResponseModel]:
        """Gets all steps in a pipeline run.

        Args:
            run_id: The ID of the pipeline run for which to list steps.

        Returns:
            A list of all run steps.
        """

    @abstractmethod
    def update_run_step(
        self,
        step_id: UUID,
        step_update: StepRunUpdateModel,
    ) -> StepRunResponseModel:
        """Updates a step.

        Args:
            step_id: The ID of the step to update.
            step_update: The update to be applied to the step.

        Returns:
            The updated step.

        Raises:
            KeyError: if the step doesn't exist.
        """

    # ---------
    # Artifacts
    # ---------

    @abstractmethod
    def create_artifact(
        self, artifact: ArtifactRequestModel
    ) -> ArtifactResponseModel:
        """Creates an artifact.

        Args:
            artifact: The artifact to create.

        Returns:
            The created artifact.

        Raises:
            KeyError: if the parent step doesn't exist.
        """

    @abstractmethod
    def list_artifacts(
        self,
        artifact_uri: Optional[str] = None,
        parent_step_id: Optional[UUID] = None,
    ) -> List[ArtifactResponseModel]:
        """Lists all artifacts.

        Args:
            artifact_uri: If specified, only artifacts with the given URI will
                be returned.
            parent_step_id: If specified, only artifacts for the given step run
                will be returned.

        Returns:
            A list of all artifacts.
        """

    # ------------------------
    # Internal utility methods
    # ------------------------
    @abstractmethod
    def _sync_runs(self) -> None:
        """Syncs runs from MLMD."""
active_user_name: str property readonly

Gets the active username.

Returns:

Type Description
str

The active username.

create_artifact(self, artifact)

Creates an artifact.

Parameters:

Name Type Description Default
artifact ArtifactRequestModel

The artifact to create.

required

Returns:

Type Description
ArtifactResponseModel

The created artifact.

Exceptions:

Type Description
KeyError

if the parent step doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_artifact(
    self, artifact: ArtifactRequestModel
) -> ArtifactResponseModel:
    """Creates an artifact.

    Args:
        artifact: The artifact to create.

    Returns:
        The created artifact.

    Raises:
        KeyError: if the parent step doesn't exist.
    """
create_flavor(self, flavor)

Creates a new stack component flavor.

Parameters:

Name Type Description Default
flavor FlavorRequestModel

The stack component flavor to create.

required

Returns:

Type Description
FlavorResponseModel

The newly created flavor.

Exceptions:

Type Description
EntityExistsError

If a flavor with the same name and type is already owned by this user in this project.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_flavor(
    self,
    flavor: FlavorRequestModel,
) -> FlavorResponseModel:
    """Creates a new stack component flavor.

    Args:
        flavor: The stack component flavor to create.

    Returns:
        The newly created flavor.

    Raises:
        EntityExistsError: If a flavor with the same name and type
            is already owned by this user in this project.
    """
create_pipeline(self, pipeline)

Creates a new pipeline in a project.

Parameters:

Name Type Description Default
pipeline PipelineRequestModel

The pipeline to create.

required

Returns:

Type Description
PipelineResponseModel

The newly created pipeline.

Exceptions:

Type Description
KeyError

if the project does not exist.

EntityExistsError

If an identical pipeline already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_pipeline(
    self,
    pipeline: PipelineRequestModel,
) -> PipelineResponseModel:
    """Creates a new pipeline in a project.

    Args:
        pipeline: The pipeline to create.

    Returns:
        The newly created pipeline.

    Raises:
        KeyError: if the project does not exist.
        EntityExistsError: If an identical pipeline already exists.
    """
create_project(self, project)

Creates a new project.

Parameters:

Name Type Description Default
project ProjectRequestModel

The project to create.

required

Returns:

Type Description
ProjectResponseModel

The newly created project.

Exceptions:

Type Description
EntityExistsError

If a project with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_project(
    self, project: ProjectRequestModel
) -> ProjectResponseModel:
    """Creates a new project.

    Args:
        project: The project to create.

    Returns:
        The newly created project.

    Raises:
        EntityExistsError: If a project with the given name already exists.
    """
create_role(self, role)

Creates a new role.

Parameters:

Name Type Description Default
role RoleRequestModel

The role model to create.

required

Returns:

Type Description
RoleResponseModel

The newly created role.

Exceptions:

Type Description
EntityExistsError

If a role with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_role(self, role: RoleRequestModel) -> RoleResponseModel:
    """Creates a new role.

    Args:
        role: The role model to create.

    Returns:
        The newly created role.

    Raises:
        EntityExistsError: If a role with the given name already exists.
    """
create_role_assignment(self, role_assignment)

Creates a new role assignment.

Parameters:

Name Type Description Default
role_assignment RoleAssignmentRequestModel

The role assignment model to create.

required

Returns:

Type Description
RoleAssignmentResponseModel

The newly created role assignment.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_role_assignment(
    self, role_assignment: RoleAssignmentRequestModel
) -> RoleAssignmentResponseModel:
    """Creates a new role assignment.

    Args:
        role_assignment: The role assignment model to create.

    Returns:
        The newly created role assignment.
    """
create_run(self, pipeline_run)

Creates a pipeline run.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to create.

required

Returns:

Type Description
PipelineRunResponseModel

The created pipeline run.

Exceptions:

Type Description
EntityExistsError

If an identical pipeline run already exists.

KeyError

If the pipeline does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Creates a pipeline run.

    Args:
        pipeline_run: The pipeline run to create.

    Returns:
        The created pipeline run.

    Raises:
        EntityExistsError: If an identical pipeline run already exists.
        KeyError: If the pipeline does not exist.
    """
create_run_step(self, step)

Creates a step.

Parameters:

Name Type Description Default
step StepRunRequestModel

The step to create.

required

Returns:

Type Description
StepRunResponseModel

The created step.

Exceptions:

Type Description
EntityExistsError

if the step already exists.

KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_run_step(
    self, step: StepRunRequestModel
) -> StepRunResponseModel:
    """Creates a step.

    Args:
        step: The step to create.

    Returns:
        The created step.

    Raises:
        EntityExistsError: if the step already exists.
        KeyError: if the pipeline run doesn't exist.
    """
create_stack(self, stack)

Create a new stack.

Parameters:

Name Type Description Default
stack StackRequestModel

The stack to create.

required

Returns:

Type Description
StackResponseModel

The created stack.

Exceptions:

Type Description
StackExistsError

If a stack with the same name is already owned by this user in this project.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_stack(self, stack: StackRequestModel) -> StackResponseModel:
    """Create a new stack.

    Args:
        stack: The stack to create.

    Returns:
        The created stack.

    Raises:
        StackExistsError: If a stack with the same name is already owned
            by this user in this project.
    """
create_stack_component(self, component)

Create a stack component.

Parameters:

Name Type Description Default
component ComponentRequestModel

The stack component to create.

required

Returns:

Type Description
ComponentResponseModel

The created stack component.

Exceptions:

Type Description
StackComponentExistsError

If a stack component with the same name and type is already owned by this user in this project.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_stack_component(
    self, component: ComponentRequestModel
) -> ComponentResponseModel:
    """Create a stack component.

    Args:
        component: The stack component to create.

    Returns:
        The created stack component.

    Raises:
        StackComponentExistsError: If a stack component with the same name
            and type is already owned by this user in this project.
    """
create_team(self, team)

Creates a new team.

Parameters:

Name Type Description Default
team TeamRequestModel

The team model to create.

required

Returns:

Type Description
TeamResponseModel

The newly created team.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_team(self, team: TeamRequestModel) -> TeamResponseModel:
    """Creates a new team.

    Args:
        team: The team model to create.

    Returns:
        The newly created team.
    """
create_user(self, user)

Creates a new user.

Parameters:

Name Type Description Default
user UserRequestModel

User to be created.

required

Returns:

Type Description
UserResponseModel

The newly created user.

Exceptions:

Type Description
EntityExistsError

If a user with the given name already exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def create_user(self, user: UserRequestModel) -> UserResponseModel:
    """Creates a new user.

    Args:
        user: User to be created.

    Returns:
        The newly created user.

    Raises:
        EntityExistsError: If a user with the given name already exists.
    """
delete_flavor(self, flavor_id)

Delete a stack component flavor.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the stack component flavor to delete.

required

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_flavor(self, flavor_id: UUID) -> None:
    """Delete a stack component flavor.

    Args:
        flavor_id: The ID of the stack component flavor to delete.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
delete_pipeline(self, pipeline_id)

Deletes a pipeline.

Parameters:

Name Type Description Default
pipeline_id UUID

The ID of the pipeline to delete.

required

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_pipeline(self, pipeline_id: UUID) -> None:
    """Deletes a pipeline.

    Args:
        pipeline_id: The ID of the pipeline to delete.

    Raises:
        KeyError: if the pipeline doesn't exist.
    """
delete_project(self, project_name_or_id)

Deletes a project.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

Name or ID of the project to delete.

required

Exceptions:

Type Description
KeyError

If no project with the given name exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_project(self, project_name_or_id: Union[str, UUID]) -> None:
    """Deletes a project.

    Args:
        project_name_or_id: Name or ID of the project to delete.

    Raises:
        KeyError: If no project with the given name exists.
    """
delete_role(self, role_name_or_id)

Deletes a role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to delete.

required

Exceptions:

Type Description
KeyError

If no role with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_role(self, role_name_or_id: Union[str, UUID]) -> None:
    """Deletes a role.

    Args:
        role_name_or_id: Name or ID of the role to delete.

    Raises:
        KeyError: If no role with the given ID exists.
    """
delete_role_assignment(self, role_assignment_id)

Delete a specific role assignment.

Parameters:

Name Type Description Default
role_assignment_id UUID

The ID of the specific role assignment

required
Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_role_assignment(self, role_assignment_id: UUID) -> None:
    """Delete a specific role assignment.

    Args:
        role_assignment_id: The ID of the specific role assignment
    """
delete_stack(self, stack_id)

Delete a stack.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to delete.

required

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_stack(self, stack_id: UUID) -> None:
    """Delete a stack.

    Args:
        stack_id: The ID of the stack to delete.

    Raises:
        KeyError: if the stack doesn't exist.
    """
delete_stack_component(self, component_id)

Delete a stack component.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to delete.

required

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

ValueError

if the stack component is part of one or more stacks.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_stack_component(self, component_id: UUID) -> None:
    """Delete a stack component.

    Args:
        component_id: The ID of the stack component to delete.

    Raises:
        KeyError: if the stack component doesn't exist.
        ValueError: if the stack component is part of one or more stacks.
    """
delete_team(self, team_name_or_id)

Deletes a team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to delete.

required

Exceptions:

Type Description
KeyError

If no team with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_team(self, team_name_or_id: Union[str, UUID]) -> None:
    """Deletes a team.

    Args:
        team_name_or_id: Name or ID of the team to delete.

    Raises:
        KeyError: If no team with the given ID exists.
    """
delete_user(self, user_name_or_id)

Deletes a user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to delete.

required

Exceptions:

Type Description
KeyError

If no user with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def delete_user(self, user_name_or_id: Union[str, UUID]) -> None:
    """Deletes a user.

    Args:
        user_name_or_id: The name or ID of the user to delete.

    Raises:
        KeyError: If no user with the given ID exists.
    """
get_auth_user(self, user_name_or_id)

Gets the auth model for a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserAuthModel

The requested user, if it was found.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_auth_user(self, user_name_or_id: Union[str, UUID]) -> UserAuthModel:
    """Gets the auth model to a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.
    """
get_flavor(self, flavor_id)

Get a stack component flavor by ID.

Parameters:

Name Type Description Default
flavor_id UUID

The ID of the flavor to get.

required

Returns:

Type Description
FlavorResponseModel

The stack component flavor.

Exceptions:

Type Description
KeyError

if the stack component flavor doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_flavor(self, flavor_id: UUID) -> FlavorResponseModel:
    """Get a stack component flavor by ID.

    Args:
        flavor_id: The ID of the flavor to get.

    Returns:
        The stack component flavor.

    Raises:
        KeyError: if the stack component flavor doesn't exist.
    """
get_metadata_config(self, expand_certs=False)

Get the TFX metadata config of this ZenStore.

Parameters:

Name Type Description Default
expand_certs bool

Whether to expand the certificate paths in the connection config to their value.

False

Returns:

Type Description
Union[ConnectionConfig, MetadataStoreClientConfig]

The TFX metadata config of this ZenStore.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_metadata_config(
    self, expand_certs: bool = False
) -> Union["ConnectionConfig", "MetadataStoreClientConfig"]:
    """Get the TFX metadata config of this ZenStore.

    Args:
        expand_certs: Whether to expand the certificate paths in the
            connection config to their value.

    Returns:
        The TFX metadata config of this ZenStore.
    """
get_or_create_run(self, pipeline_run)

Gets or creates a pipeline run.

If a run with the same ID or name already exists, it is returned. Otherwise, a new run is created.

Parameters:

Name Type Description Default
pipeline_run PipelineRunRequestModel

The pipeline run to get or create.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_or_create_run(
    self, pipeline_run: PipelineRunRequestModel
) -> PipelineRunResponseModel:
    """Gets or creates a pipeline run.

    If a run with the same ID or name already exists, it is returned.
    Otherwise, a new run is created.

    Args:
        pipeline_run: The pipeline run to get or create.

    Returns:
        The pipeline run.
    """
get_pipeline(self, pipeline_id)

Get a pipeline with a given ID.

Parameters:

Name Type Description Default
pipeline_id UUID

ID of the pipeline.

required

Returns:

Type Description
PipelineResponseModel

The pipeline.

Exceptions:

Type Description
KeyError

if the pipeline does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_pipeline(self, pipeline_id: UUID) -> PipelineResponseModel:
    """Get a pipeline with a given ID.

    Args:
        pipeline_id: ID of the pipeline.

    Returns:
        The pipeline.

    Raises:
        KeyError: if the pipeline does not exist.
    """
get_project(self, project_name_or_id)

Get an existing project by name or ID.

Parameters:

Name Type Description Default
project_name_or_id Union[uuid.UUID, str]

Name or ID of the project to get.

required

Returns:

Type Description
ProjectResponseModel

The requested project.

Exceptions:

Type Description
KeyError

If there is no such project.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_project(
    self, project_name_or_id: Union[UUID, str]
) -> ProjectResponseModel:
    """Get an existing project by name or ID.

    Args:
        project_name_or_id: Name or ID of the project to get.

    Returns:
        The requested project.

    Raises:
        KeyError: If there is no such project.
    """
get_role(self, role_name_or_id)

Gets a specific role.

Parameters:

Name Type Description Default
role_name_or_id Union[str, uuid.UUID]

Name or ID of the role to get.

required

Returns:

Type Description
RoleResponseModel

The requested role.

Exceptions:

Type Description
KeyError

If no role with the given name exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_role(self, role_name_or_id: Union[str, UUID]) -> RoleResponseModel:
    """Gets a specific role.

    Args:
        role_name_or_id: Name or ID of the role to get.

    Returns:
        The requested role.

    Raises:
        KeyError: If no role with the given name exists.
    """
get_role_assignment(self, role_assignment_id)

Gets a specific role assignment.

Parameters:

Name Type Description Default
role_assignment_id UUID

ID of the role assignment to get.

required

Returns:

Type Description
RoleAssignmentResponseModel

The requested role assignment.

Exceptions:

Type Description
KeyError

If no role assignment with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_role_assignment(
    self, role_assignment_id: UUID
) -> RoleAssignmentResponseModel:
    """Gets a specific role assignment.

    Args:
        role_assignment_id: ID of the role assignment to get.

    Returns:
        The requested role assignment.

    Raises:
        KeyError: If no role assignment with the given ID exists.
    """
get_run(self, run_name_or_id)

Gets a pipeline run.

Parameters:

Name Type Description Default
run_name_or_id Union[str, uuid.UUID]

The name or ID of the pipeline run to get.

required

Returns:

Type Description
PipelineRunResponseModel

The pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_run(
    self, run_name_or_id: Union[str, UUID]
) -> PipelineRunResponseModel:
    """Gets a pipeline run.

    Args:
        run_name_or_id: The name or ID of the pipeline run to get.

    Returns:
        The pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
get_run_step(self, step_id)

Get a step by ID.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to get.

required

Returns:

Type Description
StepRunResponseModel

The step.

Exceptions:

Type Description
KeyError

if the step doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_run_step(self, step_id: UUID) -> StepRunResponseModel:
    """Get a step by ID.

    Args:
        step_id: The ID of the step to get.

    Returns:
        The step.

    Raises:
        KeyError: if the step doesn't exist.
    """
get_run_step_inputs(self, step_id)

Get a list of inputs for a specific step.

Parameters:

Name Type Description Default
step_id UUID

The id of the step to get inputs for.

required

Returns:

Type Description
Dict[str, zenml.models.artifact_models.ArtifactResponseModel]

A dict mapping artifact names to the input artifacts for the step.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_run_step_inputs(
    self, step_id: UUID
) -> Dict[str, ArtifactResponseModel]:
    """Get a list of inputs for a specific step.

    Args:
        step_id: The id of the step to get inputs for.

    Returns:
        A dict mapping artifact names to the input artifacts for the step.
    """
get_stack(self, stack_id)

Get a stack by its unique ID.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to get.

required

Returns:

Type Description
StackResponseModel

The stack with the given ID.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_stack(self, stack_id: UUID) -> StackResponseModel:
    """Get a stack by its unique ID.

    Args:
        stack_id: The ID of the stack to get.

    Returns:
        The stack with the given ID.

    Raises:
        KeyError: if the stack doesn't exist.
    """
get_stack_component(self, component_id)

Get a stack component by ID.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to get.

required

Returns:

Type Description
ComponentResponseModel

The stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_stack_component(self, component_id: UUID) -> ComponentResponseModel:
    """Get a stack component by ID.

    Args:
        component_id: The ID of the stack component to get.

    Returns:
        The stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
get_store_info(self)

Get information about the store.

Returns:

Type Description
ServerModel

Information about the store.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_store_info(self) -> ServerModel:
    """Get information about the store.

    Returns:
        Information about the store.
    """
get_team(self, team_name_or_id)

Gets a specific team.

Parameters:

Name Type Description Default
team_name_or_id Union[str, uuid.UUID]

Name or ID of the team to get.

required

Returns:

Type Description
TeamResponseModel

The requested team.

Exceptions:

Type Description
KeyError

If no team with the given name or ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_team(self, team_name_or_id: Union[str, UUID]) -> TeamResponseModel:
    """Gets a specific team.

    Args:
        team_name_or_id: Name or ID of the team to get.

    Returns:
        The requested team.

    Raises:
        KeyError: If no team with the given name or ID exists.
    """
get_user(self, user_name_or_id)

Gets a specific user.

Parameters:

Name Type Description Default
user_name_or_id Union[str, uuid.UUID]

The name or ID of the user to get.

required

Returns:

Type Description
UserResponseModel

The requested user, if it was found.

Exceptions:

Type Description
KeyError

If no user with the given name or ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def get_user(self, user_name_or_id: Union[str, UUID]) -> UserResponseModel:
    """Gets a specific user.

    Args:
        user_name_or_id: The name or ID of the user to get.

    Returns:
        The requested user, if it was found.

    Raises:
        KeyError: If no user with the given name or ID exists.
    """
list_artifacts(self, artifact_uri=None, parent_step_id=None)

Lists all artifacts.

Parameters:

Name Type Description Default
artifact_uri Optional[str]

If specified, only artifacts with the given URI will be returned.

None
parent_step_id Optional[uuid.UUID]

If specified, only artifacts for the given step run will be returned.

None

Returns:

Type Description
List[zenml.models.artifact_models.ArtifactResponseModel]

A list of all artifacts.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_artifacts(
    self,
    artifact_uri: Optional[str] = None,
    parent_step_id: Optional[UUID] = None,
) -> List[ArtifactResponseModel]:
    """Lists all artifacts.

    Args:
        artifact_uri: If specified, only artifacts with the given URI will
            be returned.
        parent_step_id: If specified, only artifacts for the given step run
            will be returned.

    Returns:
        A list of all artifacts.
    """
list_flavors(self, project_name_or_id=None, user_name_or_id=None, component_type=None, name=None, is_shared=None)

List all stack component flavors matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

Optionally filter by the Project to which the component flavors belong

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter by the owner

None
component_type Optional[zenml.enums.StackComponentType]

Optionally filter by type of stack component

None
name Optional[str]

Optionally filter flavors by name

None
is_shared Optional[bool]

Optionally filter out flavors by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.flavor_models.FlavorResponseModel]

List of all the stack component flavors matching the given criteria.

Exceptions:

Type Description
KeyError

if the project doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_flavors(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_type: Optional[StackComponentType] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[FlavorResponseModel]:
    """List all stack component flavors matching the given filter criteria.

    Args:
        project_name_or_id: Optionally filter by the Project to which the
            component flavors belong
        user_name_or_id: Optionally filter by the owner
        component_type: Optionally filter by type of stack component
        name: Optionally filter flavors by name
        is_shared: Optionally filter out flavors by whether they are
            shared or not

    Returns:
        List of all the stack component flavors matching the given criteria.

    Raises:
        KeyError: if the project doesn't exist.
    """
list_pipelines(self, project_name_or_id=None, user_name_or_id=None, name=None)

List all pipelines in the project.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines in this project.

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list pipelines from this user.

None
name Optional[str]

If provided, only list pipelines with this name.

None

Returns:

Type Description
List[zenml.models.pipeline_models.PipelineResponseModel]

A list of pipelines.

Exceptions:

Type Description
KeyError

if the project does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_pipelines(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    name: Optional[str] = None,
) -> List[PipelineResponseModel]:
    """List all pipelines in the project.

    Args:
        project_name_or_id: If provided, only list pipelines in this
            project.
        user_name_or_id: If provided, only list pipelines from this user.
        name: If provided, only list pipelines with this name.

    Returns:
        A list of pipelines.

    Raises:
        KeyError: if the project does not exist.
    """
list_projects(self, name=None)

List all projects.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.project_models.ProjectResponseModel]

A list of all projects.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_projects(
    self, name: Optional[str] = None
) -> List[ProjectResponseModel]:
    """List all projects.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all projects.
    """
list_role_assignments(self, project_name_or_id=None, role_name_or_id=None, team_name_or_id=None, user_name_or_id=None)

List all role assignments.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given project

None
role_name_or_id Union[str, uuid.UUID]

If provided, only list assignments of the given role

None
team_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given team

None
user_name_or_id Union[str, uuid.UUID]

If provided, only list assignments for the given user

None

Returns:

Type Description
List[zenml.models.role_assignment_models.RoleAssignmentResponseModel]

A list of all role assignments.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_role_assignments(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    role_name_or_id: Optional[Union[str, UUID]] = None,
    team_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
) -> List[RoleAssignmentResponseModel]:
    """List all role assignments.

    Args:
        project_name_or_id: If provided, only list assignments for the given
            project
        role_name_or_id: If provided, only list assignments of the given
            role
        team_name_or_id: If provided, only list assignments for the given
            team
        user_name_or_id: If provided, only list assignments for the given
            user

    Returns:
        A list of all role assignments.
    """
list_roles(self, name=None)

List all roles.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.role_models.RoleResponseModel]

A list of all roles.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_roles(self, name: Optional[str] = None) -> List[RoleResponseModel]:
    """List all roles.

    Args:
        name: Optionally filter by name

    Returns:
        A list of all roles.
    """
list_run_steps(self, run_id=None)

Gets all steps in a pipeline run.

Parameters:

Name Type Description Default
run_id Optional[uuid.UUID]

The ID of the pipeline run for which to list steps.

None

Returns:

Type Description
List[zenml.models.step_run_models.StepRunResponseModel]

A list of all run steps.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_run_steps(
    self, run_id: Optional[UUID] = None
) -> List[StepRunResponseModel]:
    """Gets all steps in a pipeline run.

    Args:
        run_id: The ID of the pipeline run for which to list steps.

    Returns:
        A list of all run steps.
    """
list_runs(self, name=None, project_name_or_id=None, stack_id=None, component_id=None, user_name_or_id=None, pipeline_id=None, unlisted=False)

Gets all pipeline runs.

Parameters:

Name Type Description Default
name Optional[str]

Run name if provided

None
project_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this project.

None
stack_id Optional[uuid.UUID]

If provided, only return runs for this stack.

None
component_id Optional[uuid.UUID]

Optionally filter for runs that used the component

None
user_name_or_id Union[str, uuid.UUID]

If provided, only return runs for this user.

None
pipeline_id Optional[uuid.UUID]

If provided, only return runs for this pipeline.

None
unlisted bool

If True, only return unlisted runs that are not associated with any pipeline (filter by pipeline_id==None).

False

Returns:

Type Description
List[zenml.models.pipeline_run_models.PipelineRunResponseModel]

A list of all pipeline runs.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_runs(
    self,
    name: Optional[str] = None,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    stack_id: Optional[UUID] = None,
    component_id: Optional[UUID] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    pipeline_id: Optional[UUID] = None,
    unlisted: bool = False,
) -> List[PipelineRunResponseModel]:
    """Gets all pipeline runs.

    Args:
        name: If provided, only return runs with this name.
        project_name_or_id: If provided, only return runs for this project.
        stack_id: If provided, only return runs for this stack.
        component_id: If provided, only return runs that used this
            component.
        user_name_or_id: If provided, only return runs for this user.
        pipeline_id: If provided, only return runs for this pipeline.
        unlisted: If True, only return unlisted runs that are not
            associated with any pipeline (filter by `pipeline_id==None`).

    Returns:
        A list of all pipeline runs matching the filter criteria.
    """
list_stack_components(self, project_name_or_id=None, user_name_or_id=None, type=None, flavor_name=None, name=None, is_shared=None)

List all stack components matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

The ID or name of the Project to which the stack components belong

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stack components by the owner

None
type Optional[str]

Optionally filter by type of stack component

None
flavor_name Optional[str]

Optionally filter by flavor

None
name Optional[str]

Optionally filter stack component by name

None
is_shared Optional[bool]

Optionally filter stack components by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.component_models.ComponentResponseModel]

A list of all stack components matching the filter criteria.

Exceptions:

Type Description
KeyError

if the project doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_stack_components(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    type: Optional[str] = None,
    flavor_name: Optional[str] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[ComponentResponseModel]:
    """List all stack components matching the given filter criteria.

    Args:
        project_name_or_id: The ID or name of the Project to which the stack
            components belong.
        user_name_or_id: Optionally filter stack components by the owner.
        type: Optionally filter by type of stack component.
        flavor_name: Optionally filter by flavor.
        name: Optionally filter stack components by name.
        is_shared: Optionally filter stack components by whether they are
            shared or not.

    Returns:
        A list of all stack components matching the filter criteria.

    Raises:
        KeyError: if the project doesn't exist.
    """
list_stacks(self, project_name_or_id=None, user_name_or_id=None, component_id=None, name=None, is_shared=None)

List all stacks matching the given filter criteria.

Parameters:

Name Type Description Default
project_name_or_id Union[str, uuid.UUID]

ID or name of the Project containing the stack

None
user_name_or_id Union[str, uuid.UUID]

Optionally filter stacks by their owner

None
component_id Optional[uuid.UUID]

Optionally filter for stacks that contain the component

None
name Optional[str]

Optionally filter stacks by their name

None
is_shared Optional[bool]

Optionally filter stacks by whether they are shared or not

None

Returns:

Type Description
List[zenml.models.stack_models.StackResponseModel]

A list of all stacks matching the filter criteria.

Exceptions:

Type Description
KeyError

if the project doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_stacks(
    self,
    project_name_or_id: Optional[Union[str, UUID]] = None,
    user_name_or_id: Optional[Union[str, UUID]] = None,
    component_id: Optional[UUID] = None,
    name: Optional[str] = None,
    is_shared: Optional[bool] = None,
) -> List[StackResponseModel]:
    """List all stacks matching the given filter criteria.

    Args:
        project_name_or_id: ID or name of the Project containing the stack.
        user_name_or_id: Optionally filter stacks by their owner.
        component_id: Optionally filter for stacks that contain the
            component.
        name: Optionally filter stacks by their name.
        is_shared: Optionally filter stacks by whether they are shared
            or not.

    Returns:
        A list of all stacks matching the filter criteria.

    Raises:
        KeyError: if the project doesn't exist.
    """
list_teams(self, name=None)

List all teams.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.team_models.TeamResponseModel]

A list of all teams.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_teams(self, name: Optional[str] = None) -> List[TeamResponseModel]:
    """List all teams.

    Args:
        name: If provided, only return teams with this name.

    Returns:
        A list of all teams matching the filter.
    """
list_users(self, name=None)

List all users.

Parameters:

Name Type Description Default
name Optional[str]

Optionally filter by name

None

Returns:

Type Description
List[zenml.models.user_models.UserResponseModel]

A list of all users.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def list_users(self, name: Optional[str] = None) -> List[UserResponseModel]:
    """List all users.

    Args:
        name: If provided, only return users with this name.

    Returns:
        A list of all users matching the filter.
    """
update_pipeline(self, pipeline_id, pipeline_update)

Updates a pipeline.

Parameters:

Name Type Description Default
pipeline_id UUID

The ID of the pipeline to be updated.

required
pipeline_update PipelineUpdateModel

The update to be applied.

required

Returns:

Type Description
PipelineResponseModel

The updated pipeline.

Exceptions:

Type Description
KeyError

if the pipeline doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_pipeline(
    self,
    pipeline_id: UUID,
    pipeline_update: PipelineUpdateModel,
) -> PipelineResponseModel:
    """Updates a pipeline.

    Args:
        pipeline_id: The ID of the pipeline to be updated.
        pipeline_update: The update to be applied to the pipeline.

    Returns:
        The updated pipeline.

    Raises:
        KeyError: if the pipeline doesn't exist.
    """
update_project(self, project_id, project_update)

Update an existing project.

Parameters:

Name Type Description Default
project_id UUID

The ID of the project to be updated.

required
project_update ProjectUpdateModel

The update to be applied to the project.

required

Returns:

Type Description
ProjectResponseModel

The updated project.

Exceptions:

Type Description
KeyError

if the project does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_project(
    self, project_id: UUID, project_update: ProjectUpdateModel
) -> ProjectResponseModel:
    """Update an existing project.

    Args:
        project_id: The ID of the project to be updated.
        project_update: The update to be applied to the project.

    Returns:
        The updated project.

    Raises:
        KeyError: if the project does not exist.
    """
update_role(self, role_id, role_update)

Update an existing role.

Parameters:

Name Type Description Default
role_id UUID

The ID of the role to be updated.

required
role_update RoleUpdateModel

The update to be applied to the role.

required

Returns:

Type Description
RoleResponseModel

The updated role.

Exceptions:

Type Description
KeyError

if the role does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_role(
    self, role_id: UUID, role_update: RoleUpdateModel
) -> RoleResponseModel:
    """Update an existing role.

    Args:
        role_id: The ID of the role to be updated.
        role_update: The update to be applied to the role.

    Returns:
        The updated role.

    Raises:
        KeyError: if the role does not exist.
    """
update_run(self, run_id, run_update)

Updates a pipeline run.

Parameters:

Name Type Description Default
run_id UUID

The ID of the pipeline run to update.

required
run_update PipelineRunUpdateModel

The update to be applied to the pipeline run.

required

Returns:

Type Description
PipelineRunResponseModel

The updated pipeline run.

Exceptions:

Type Description
KeyError

if the pipeline run doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_run(
    self, run_id: UUID, run_update: PipelineRunUpdateModel
) -> PipelineRunResponseModel:
    """Updates a pipeline run.

    Args:
        run_id: The ID of the pipeline run to update.
        run_update: The update to be applied to the pipeline run.

    Returns:
        The updated pipeline run.

    Raises:
        KeyError: if the pipeline run doesn't exist.
    """
update_run_step(self, step_id, step_update)

Updates a step.

Parameters:

Name Type Description Default
step_id UUID

The ID of the step to update.

required
step_update StepRunUpdateModel

The update to be applied to the step.

required

Returns:

Type Description
StepRunResponseModel

The updated step.

Exceptions:

Type Description
KeyError

if the step doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_run_step(
    self,
    step_id: UUID,
    step_update: StepRunUpdateModel,
) -> StepRunResponseModel:
    """Updates a step run.

    Args:
        step_id: The ID of the step to update.
        step_update: The update to be applied to the step.

    Returns:
        The updated step.

    Raises:
        KeyError: if the step doesn't exist.
    """
update_stack(self, stack_id, stack_update)

Update a stack.

Parameters:

Name Type Description Default
stack_id UUID

The ID of the stack to update.

required
stack_update StackUpdateModel

The update request on the stack.

required

Returns:

Type Description
StackResponseModel

The updated stack.

Exceptions:

Type Description
KeyError

if the stack doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_stack(
    self, stack_id: UUID, stack_update: StackUpdateModel
) -> StackResponseModel:
    """Update a stack.

    Args:
        stack_id: The ID of the stack to update.
        stack_update: The update request on the stack.

    Returns:
        The updated stack.

    Raises:
        KeyError: if the stack doesn't exist.
    """
update_stack_component(self, component_id, component_update)

Update an existing stack component.

Parameters:

Name Type Description Default
component_id UUID

The ID of the stack component to update.

required
component_update ComponentUpdateModel

The update to be applied to the stack component.

required

Returns:

Type Description
ComponentResponseModel

The updated stack component.

Exceptions:

Type Description
KeyError

if the stack component doesn't exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_stack_component(
    self,
    component_id: UUID,
    component_update: ComponentUpdateModel,
) -> ComponentResponseModel:
    """Update an existing stack component.

    Args:
        component_id: The ID of the stack component to update.
        component_update: The update to be applied to the stack component.

    Returns:
        The updated stack component.

    Raises:
        KeyError: if the stack component doesn't exist.
    """
update_team(self, team_id, team_update)

Update an existing team.

Parameters:

Name Type Description Default
team_id UUID

The ID of the team to be updated.

required
team_update TeamUpdateModel

The update to be applied to the team.

required

Returns:

Type Description
TeamResponseModel

The updated team.

Exceptions:

Type Description
KeyError

if the team does not exist.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_team(
    self, team_id: UUID, team_update: TeamUpdateModel
) -> TeamResponseModel:
    """Update an existing team.

    Args:
        team_id: The ID of the team to be updated.
        team_update: The update to be applied to the team.

    Returns:
        The updated team.

    Raises:
        KeyError: if the team does not exist.
    """
update_user(self, user_id, user_update)

Updates an existing user.

Parameters:

Name Type Description Default
user_id UUID

The id of the user to update.

required
user_update UserUpdateModel

The update to be applied to the user.

required

Returns:

Type Description
UserResponseModel

The updated user.

Exceptions:

Type Description
KeyError

If no user with the given ID exists.

Source code in zenml/zen_stores/zen_store_interface.py
@abstractmethod
def update_user(
    self, user_id: UUID, user_update: UserUpdateModel
) -> UserResponseModel:
    """Updates an existing user.

    Args:
        user_id: The ID of the user to update.
        user_update: The update to be applied to the user.

    Returns:
        The updated user.

    Raises:
        KeyError: If no user with the given ID exists.
    """