Applications

plateforme.core.main

This module contains the Plateforme application.

CALLER_REF module-attribute

CALLER_REF = '$'

A special reference to the caller package name.
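
For illustration, the reference can be passed wherever a package name is expected, for instance to add_packages (a minimal sketch; the surrounding application setup is assumed):

from plateforme.core.main import CALLER_REF, Plateforme

app = Plateforme()
# '$' (CALLER_REF) resolves to the package of the module that created the
# application, so its resources and services are registered on this app.
app.add_packages(CALLER_REF)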

AppProxy

AppProxy(
    app: Plateforme,
    target: Iterable[_T] | Callable[..., Iterable[_T]],
)

Bases: CollectionProxy[_T], Generic[_T]

An application proxy.

It delegates attribute access to a target object or callable. This class is used internally to proxy the Plateforme application metadata and registry.

Attributes:

Name Type Description
app Plateforme

The application instance.

Initialize an application proxy instance.

Parameters:

Name Type Description Default
app Plateforme

The application instance.

required
target Iterable[_T] | Callable[..., Iterable[_T]]

The target object or callable to proxy to. If the target is a callable, it will be called to retrieve the actual target object. The target object can be any iterable type.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def __init__(
    self,
    app: 'Plateforme',
    target: Iterable[_T] | Callable[..., Iterable[_T]],
) -> None:
    """Initialize an application proxy instance.

    Args:
        app: The application instance.
        target: The target object or callable to proxy to. If the target is
            a callable, it will be called to retrieve the actual target
            object. The target object can be any iterable type.
    """
    object.__setattr__(self, 'app', app)
    super().__init__(target)

MetaDataProxy

MetaDataProxy(
    app: Plateforme,
    target: Iterable[_T] | Callable[..., Iterable[_T]],
)

Bases: AppProxy[MetaData]

The application metadata proxy class.

Initialize an application proxy instance.

Parameters:

Name Type Description Default
app Plateforme

The application instance.

required
target Iterable[_T] | Callable[..., Iterable[_T]]

The target object or callable to proxy to. If the target is a callable, it will be called to retrieve the actual target object. The target object can be any iterable type.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def __init__(
    self,
    app: 'Plateforme',
    target: Iterable[_T] | Callable[..., Iterable[_T]],
) -> None:
    """Initialize an application proxy instance.

    Args:
        app: The application instance.
        target: The target object or callable to proxy to. If the target is
            a callable, it will be called to retrieve the actual target
            object. The target object can be any iterable type.
    """
    object.__setattr__(self, 'app', app)
    super().__init__(target)

create_all

create_all(
    bind: str = "default", /, checkfirst: bool = True
) -> None

Create all tables stored in the application metadata.

Conditional by default, will not attempt to recreate tables already present in the target databases.

Parameters:

Name Type Description Default
bind str

An engine alias used to access the database. Defaults to default.

'default'
checkfirst bool

Don't issue CREATE statements for tables already present in the target database. Defaults to True.

True
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def create_all(
    self,
    bind: str = 'default',
    /,
    checkfirst: bool = True,
) -> None:
    """Create all tables stored in the application metadata.

    Conditional by default, will not attempt to recreate tables already
    present in the target databases.

    Args:
        bind: An engine alias used to access the database. Defaults to
            ``default``.
        checkfirst: Don't issue ``CREATE`` statements for tables already
            present in the target database. Defaults to ``True``.
    """
    # Check if the engine is registered
    if bind not in self.app.database:
        raise PlateformeError(
            f"Cannot create all tables. The engine {bind!r} is not "
            f"registered.",
            code='plateforme-invalid-engine',
        )
    engine = self.app.database.engines[bind]
    # Call proxy methods for create all
    create_all = super().__proxy_getattr__('create_all')
    if callable(create_all):
        create_all(engine, checkfirst=checkfirst)

drop_all

drop_all(
    bind: str = "default", /, checkfirst: bool = True
) -> None

Drop all tables stored in the application metadata.

Conditional by default, will not attempt to drop tables not present in the target database.

Parameters:

Name Type Description Default
bind str

An engine alias used to access the database. Defaults to default.

'default'
checkfirst bool

Only issue DROP statements for tables confirmed to be present in the target database.

True
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def drop_all(
    self,
    bind: str = 'default',
    /,
    checkfirst: bool = True,
) -> None:
    """Drop all tables stored in the application metadata.

    Conditional by default, will not attempt to drop tables not present in
    the target database.

    Args:
        bind: An engine alias used to access the database. Defaults to
            ``default``.
        checkfirst: Only issue ``DROP`` statements for tables confirmed to
            be present in the target database.
    """
    # Check if the engine is registered
    if bind not in self.app.database:
        raise PlateformeError(
            f"Cannot drop all tables. The engine {bind!r} is not "
            f"registered.",
            code='plateforme-invalid-engine',
        )
    engine = self.app.database.engines[bind]
    # Call proxy methods for drop all
    drop_all = super().__proxy_getattr__('drop_all')
    if callable(drop_all):
        drop_all(engine, checkfirst=checkfirst)
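
As a usage sketch of the two proxy methods above, assuming an application instance app whose packages define tables (the 'default' alias is the built-in engine name):

# Create any missing tables on the default engine, then drop them again;
# checkfirst keeps both calls conditional and therefore idempotent.
app.metadata.create_all('default', checkfirst=True)
app.metadata.drop_all('default', checkfirst=True)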

RegistryProxy

RegistryProxy(
    app: Plateforme,
    target: Iterable[_T] | Callable[..., Iterable[_T]],
)

Bases: AppProxy[Registry]

The application registry proxy class.

Initialize an application proxy instance.

Parameters:

Name Type Description Default
app Plateforme

The application instance.

required
target Iterable[_T] | Callable[..., Iterable[_T]]

The target object or callable to proxy to. If the target is a callable, it will be called to retrieve the actual target object. The target object can be any iterable type.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def __init__(
    self,
    app: 'Plateforme',
    target: Iterable[_T] | Callable[..., Iterable[_T]],
) -> None:
    """Initialize an application proxy instance.

    Args:
        app: The application instance.
        target: The target object or callable to proxy to. If the target is
            a callable, it will be called to retrieve the actual target
            object. The target object can be any iterable type.
    """
    object.__setattr__(self, 'app', app)
    super().__init__(target)

PlateformeMeta

Bases: type

The Plateforme metaclass.

apps property

apps: AppMap

Retrieve all initialized apps from the runtime environment.

namespaces property

namespaces: NamespaceMap

Retrieve all initialized namespaces from the runtime environment.

packages property

packages: PackageMap

Retrieve all initialized packages from the runtime environment.

dependencies property

dependencies: ResourceLinkMap

Retrieve all initialized resource dependencies from the runtime environment.

dependents property

dependents: ResourceLinkMap

Retrieve all initialized resource dependents from the runtime environment.

resources property

resources: ResourceMap

Retrieve all initialized resources from the runtime environment.

Plateforme

Plateforme(
    __settings: Settings | str | None = None,
    **kwargs: Unpack[SettingsDict],
)

Bases: EventEmitter

The Plateforme application class.

Initialize the Plateforme application.

The application settings can be provided either as a Settings instance, a dictionary, or a string path to the settings module. If the settings argument is a Settings instance, it will be used as is, otherwise the provided arguments are used to initialize the settings.

It is not possible to initialize the Plateforme application with both settings and keyword arguments. Please provide either settings or keyword arguments, not both.

Parameters:

Name Type Description Default
__settings Settings | str | None

The settings to use for the Plateforme application. It can be provided either as a Settings instance, or a string path to the settings module. If the settings argument is a Settings instance, it will be used as is. If the settings argument is a string path to the settings module, it will be imported and used to initialize the settings. Finally, if the settings argument and keyword arguments are not provided, the PLATEFORME_SETTINGS path environment variable will be used to import the settings module or result in default settings if the environment variable is not set. Defaults to None.

None
**kwargs Unpack[SettingsDict]

The keyword arguments to use for the Plateforme application settings. It must adhere to the SettingsDict dictionary schema.

{}
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def __init__(
    self,
    __settings: Settings | str | None = None,
    **kwargs: Unpack[SettingsDict],
) -> None:
    """Initialize the Plateforme application.

    The application settings can be provided either as a `Settings`
    instance, a dictionary, or string path to the settings module.  If the
    settings argument is a `Settings` instance, it will be used as is,
    otherwise the provided arguments are used to initialize the settings.

    It is not possible to initialize the Plateforme application with both
    settings and keyword arguments. Please provide either settings or
    keyword arguments, not both.

    Args:
        __settings: The settings to use for the Plateforme application. It
            can be provided either as a `Settings` instance, or a string
            path to the settings module. If the settings argument is a
            `Settings` instance, it will be used as is. If the settings
            argument is a string path to the settings module, it will be
            imported and used to initialize the settings. Finally, if the
            settings argument and keyword arguments are not provided, the
            ``PLATEFORME_SETTINGS`` path environment variable will be used
            to import the settings module or result in default settings if
            the environment variable is not set. Defaults to ``None``.
        **kwargs: The keyword arguments to use for the Plateforme
            application settings. It must adhere to the `SettingsDict`
            dictionary schema.
    """
    super().__init__()
    object.__setattr__(self, '__plateforme_mounted__', False)

    # Get the stack frame of the caller of the constructor and extract its
    # module name and package from the frame information.
    caller_namespace = get_parent_frame_namespace(depth=3, mode='globals')
    caller_namespace = caller_namespace or {}
    caller = caller_namespace.get('__name__', get_root_module_name())
    caller_package = runtime.import_package(caller, force_resolution=True)
    object.__setattr__(self, 'caller', caller)
    object.__setattr__(self, 'caller_package', caller_package)

    # Initialize settings
    if __settings and kwargs:
        raise PlateformeError(
            "Cannot initialize Plateforme application with both settings "
            "and keyword arguments. Please provide either settings or "
            "keyword arguments, not both.",
            code='plateforme-invalid-config',
        )

    settings: Any = __settings or kwargs

    # Validate settings
    if not isinstance(settings, Settings):
        settings_dict: dict[str, Any] = {}

        if isinstance(settings, dict):
            settings_dict = settings
        elif isinstance(settings, str) or settings is None:
            # Retrieve settings path
            if settings is None:
                settings_path = os.getenv('PLATEFORME_SETTINGS')
            else:
                settings_path = settings
            # Import settings module if the settings path is specified and
            # build the settings instance from the imported module
            # attributes.
            if settings_path:
                try:
                    settings_module = import_module(settings_path)
                except ImportError as error:
                    raise ImportError(
                        f"An error occurred while importing the "
                        f"application settings module {settings_path!r}."
                    ) from error
                except Exception as error:
                    raise PlateformeError(
                        f"En error occurred while evaluating the "
                        f"application settings module {settings_path!r}."
                    ) from error
                # Extract settings
                for key in dir(settings_module):
                    if not key.isupper():
                        continue
                    settings_dict[key.lower()] = \
                        getattr(settings_module, key)

        settings = Settings.model_validate(settings_dict)

    object.__setattr__(self, 'settings', settings)

    # Initialize logging
    if self.settings.logging is True:
        setup_logging()
    elif self.settings.logging is not False:
        setup_logging(self.settings.logging)

    # Initialize namespaces and packages
    object.__setattr__(self, 'namespaces', WeakValueDictionary())
    object.__setattr__(self, 'packages', WeakValueDictionary())
    object.__setattr__(self, 'metadata', MetaDataProxy(
        self, lambda: [p.metadata for p in self.packages.values()]
    ))
    object.__setattr__(self, 'registry', RegistryProxy(
        self, lambda: [p.registry for p in self.packages.values()]
    ))

    # Initialize sessions and database
    routers: list[DatabaseRouter] = []
    for name in self.settings.database_routers:
        router_module = import_module(name)
        for router_name in dir(router_module):
            router = getattr(router_module, router_name)
            if inspect.isclass(router) \
                    and issubclass(router, DatabaseRouter) \
                    and not inspect.isabstract(router):
                routers.append(router())
            else:
                raise PlateformeError(
                    f"Cannot import database router {router_name!r} from "
                    f"module {name!r}. The router is not a subclass of "
                    f"`DatabaseRouter`.",
                    code='plateforme-invalid-router',
                )
    object.__setattr__(self, 'database',
        DatabaseManager(self.settings.database_engines, routers))
    object.__setattr__(self, 'async_session',
        async_session_factory(routing=self.database, scoped=True))
    object.__setattr__(self, 'session',
        session_factory(routing=self.database, scoped=True))
    object.__setattr__(self, 'token', None)

    # Finalize setup
    self.add_namespaces(*self.settings.namespaces)
    self.add_packages(*self.settings.packages)
    self.setup_api(reset=True)

    # Register the application
    from .runtime import __plateforme__
    with __plateforme__.lock('apps'):
        if self.name in __plateforme__.apps:
            raise PlateformeError(
                f"Application {str(self)!r} is already registered within "
                f"the runtime environment.",
                code='plateforme-invalid-application',
            )
        __plateforme__.apps[self.name] = self

    # Set application context
    if self.settings.context:
        PLATEFORME_CONTEXT.set(self)

    logger.info(f"({self}) initialized")

name property

name: str

The application name.

title property

title: str

The application title.

on

on(event: str, listener: Callable[..., Any]) -> None

Registers a listener for a given event.

Parameters:

Name Type Description Default
event str

The name of the event.

required
listener Callable[..., Any]

The callback function to invoke when the event is emitted.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/events.py
def on(self, event: str, listener: Callable[..., Any]) -> None:
    """Registers a listener for a given event.

    Args:
        event: The name of the event.
        listener: The callback function to invoke when the event is
            emitted.
    """
    if event not in self._listeners:
        self._listeners[event] = []
    if listener not in self._listeners[event]:
        self._listeners[event].append(listener)

emit

emit(event: str, *args: Any, **kwargs: Any) -> None

Emits an event, calling all registered listeners for this event.

Parameters:

Name Type Description Default
event str

The name of the event.

required
args Any

Positional arguments to pass to the listener.

()
kwargs Any

Keyword arguments to pass to the listener.

{}
Source code in .venv/lib/python3.12/site-packages/plateforme/core/events.py
def emit(self, event: str, *args: Any, **kwargs: Any) -> None:
    """Emits an event, calling all registered listeners for this event.

    Args:
        event: The name of the event.
        args: Positional arguments to pass to the listener.
        kwargs: Keyword arguments to pass to the listener.
    """
    for listener in self._listeners.get(event, []):
        listener(*args, **kwargs)
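
A short sketch of the listener API above, using a Plateforme instance since it subclasses EventEmitter (the event name and payload are arbitrary):

from plateforme.core.main import Plateforme

app = Plateforme()

def on_ready(message: str) -> None:
    # Called with the arguments passed to emit().
    print(message)

app.on('ready', on_ready)
app.emit('ready', 'all packages mounted')  # prints 'all packages mounted'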

add_namespaces

add_namespaces(
    *args: str | tuple[str, NamespaceSettings],
    raise_errors: bool = True,
) -> None

Add given namespaces to the application.

It adds the provided namespace names to the application with optional settings. The settings are used to configure the namespace behavior within the application.

Parameters:

Name Type Description Default
*args str | tuple[str, NamespaceSettings]

A list of namespace names with optional settings to add to the application. The settings are used to configure the namespace behavior within the application.

()
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True

Raises:

Type Description
PlateformeError

If a namespace is already installed.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def add_namespaces(
    self,
    /,
    *args: str | tuple[str, NamespaceSettings],
    raise_errors: bool = True,
) -> None:
    """Add given namespaces to the application.

    It adds the provided namespace name to the application with optional
    settings. The settings are used to configure the namespace behavior
    within the application.

    Args:
        *args: A list of namespace name with optional settings to add to
            the application. The settings are used to configure the
            namespace behavior within the application.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.

    Raises:
        PlateformeError: If a namespace is already installed.
    """
    for arg in args:
        # Unwrap namespace and settings
        if isinstance(arg, tuple):
            name, settings = arg
        else:
            name = arg
            settings = None
        # Check if namespace is already installed
        if name in self.namespaces:
            if not raise_errors:
                continue
            raise PlateformeError(
                f"Namespace {name!r} already exists within application "
                f"{str(self)!r}.",
                code='plateforme-invalid-namespace',
            )
        # Add namespace
        namespace = runtime.import_namespace(name, create_if_missing=True)
        namespace._add_impl(self, settings=settings)

        logger.info(f"({self}) ns:{name} -> added")

remove_namespaces

remove_namespaces(
    *names: str, raise_errors: bool = True
) -> None

Remove given namespaces from the application.

It removes the provided namespace names from the application and cleans up the auto-imported dependencies.

Parameters:

Name Type Description Default
*names str

A list of namespace names to remove from the application.

()
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True

Raises:

Type Description
PlateformeError

If a namespace does not exist within the application.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def remove_namespaces(
    self,
    /,
    *names: str,
    raise_errors: bool = True,
) -> None:
    """Remove given namespaces from the application.

    It removes the provided namespace names from the application and cleans
    up the auto-imported dependencies.

    Args:
        *names: A list of namespace names to remove from the application.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.

    Raises:
        PlateformeError: If a namespace does not exist within the
            application.
    """
    for name in names:
        if name not in self.namespaces:
            if not raise_errors:
                continue
            raise PlateformeError(
                f"Namespace {name!r} does not exist within application "
                f"{str(self)!r}.",
                code='plateforme-invalid-namespace',
            )
        impl = self.namespaces.pop(name)
        impl.namespace._remove_impl(self)

        logger.info(f"({self}) ns:{name} -> removed")

add_packages

add_packages(
    *args: str | tuple[str, PackageSettings],
    raise_errors: bool = True,
) -> None

Add given packages to the application.

It adds the provided package names to the application with optional settings. The settings are used to configure the package behavior within the application. Finally, it checks for package dependencies and imports them if the auto_import_dependencies setting is enabled.

Parameters:

Name Type Description Default
*args str | tuple[str, PackageSettings]

A list of package names with optional settings to add to the application. The settings are used to configure the package behavior within the application.

()
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True

Raises:

Type Description
PlateformeError

If a package is already installed or if the auto_import_dependencies setting is disabled and a package dependency is not installed.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def add_packages(
    self,
    /,
    *args: str | tuple[str, PackageSettings],
    raise_errors: bool = True,
) -> None:
    """Add given packages to the application.

    It adds the provided package name to the application with optional
    settings. The settings are used to configure the package behavior
    within the application. Finally, it checks for package dependencies and
    imports them if the ``auto_import_dependencies`` setting is enabled.

    Args:
        *args: A list of package name with optional settings to add to
            the application. The settings are used to configure the package
            behavior within the application.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.

    Raises:
        PlateformeError: If a package is already installed or if the
            ``auto_import_dependencies`` setting is disabled and a package
            dependency is not installed.
    """
    # Gather packages to add
    config: dict[str, PackageSettings | None] = dict()
    backlog: dict[str, Package] = dict()

    # Helper function to walk and collect packages
    def walk_dependencies(
        package: Package, guard: frozenset[str] = frozenset()
    ) -> None:
        if package.name in guard:
            return
        backlog[package.name] = package
        package_deps = runtime.get_dependencies(package, kind='packages')
        for package_dep in package_deps:
            if package_dep.name not in self.packages \
                    and not self.settings.auto_import_dependencies:
                if not raise_errors:
                    continue
                raise PlateformeError(
                    f"Cannot import package dependency "
                    f"{package_dep.name!r}. The package dependency is not "
                    f"installed and the `auto_import_dependencies` "
                    f"setting is disabled. Please either install the "
                    f"package manually or set the "
                    f"`auto_import_dependencies` setting to `True`.",
                    code='plateforme-invalid-package',
                )
            walk_dependencies(package_dep, guard | {package.name})

    # Collect packages
    for arg in args:
        # Unwrap package and settings
        if isinstance(arg, tuple):
            name, settings = arg
        else:
            name = arg
            settings = None

        # Handle caller package reference
        if name == CALLER_REF:
            caller_path = resolve_relative_import_name(
                self.caller_package.name,
                self.caller
            )
            caller_settings = PackageSettings(
                api_resources=[caller_path],
                api_services=[caller_path],
            )
            name = self.caller_package.name
            package = self.caller_package
            settings = merge_settings(caller_settings, settings)
        # Handle normal package import
        else:
            package = runtime.import_package(name, force_resolution=False)

        # Check if provided package name is duplicated
        if name in config:
            raise PlateformeError(
                f"Duplicated package {name!r} found within provided "
                f"package names.",
                code='plateforme-invalid-package',
            )

        # Check if package is already installed
        if name in self.packages:
            if not raise_errors:
                continue
            raise PlateformeError(
                f"Package {name!r} already exists within application "
                f"{str(self)!r}.",
                code='plateforme-invalid-package',
            )

        config[name] = settings

        walk_dependencies(package)

    # Add packages
    for name, package in reversed(list(backlog.items())):
        # Skip already installed packages
        if name in self.packages:
            continue

        # Check if package has dependencies not planned to be added
        if check_deps := [
            dependency.name
            for dependency
            in runtime.get_dependencies(package, kind='packages')
            if dependency.name not in backlog
        ]:
            if not raise_errors:
                backlog.pop(name)
                continue
            raise PlateformeError(
                f"Cannot add package {name!r} to the application "
                f"{str(self)!r}. The package has dependencies not planned "
                f"to be added: {', '.join(check_deps)}.",
                code='plateforme-invalid-package',
            )

        package._add_impl(
            self,
            settings=config.get(name, None),
            auto_generated=name not in config,
            auto_import_namespace=self.settings.auto_import_namespaces,
        )

        logger.info(f"({self}) pkg:{name} -> added")

remove_packages

remove_packages(
    *args: str, raise_errors: bool = True
) -> None

Remove given packages from the application.

It removes the provided package names from the application and cleans up the auto-imported dependencies.

Parameters:

Name Type Description Default
*args str

A list of package module names to remove from the application.

()
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True

Raises:

Type Description
PlateformeError

If a package does not exist within the application or if the package has dependents not planned to be removed.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def remove_packages(
    self,
    /,
    *args: str,
    raise_errors: bool = True,
) -> None:
    """Remove given packages from the application.

    It removes the provided package names from the application and cleans
    up the auto-imported dependencies.

    Args:
        *args: A list of package module names to remove from the
            application.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.

    Raises:
        PlateformeError: If a package does not exist within the application
            or if the package has dependents not planned to be removed.
    """
    # Gather packages to remove
    config: set[str] = set()
    backlog: dict[str, Package] = dict()

    # Helper function to walk and collect packages
    def walk_dependencies(
        package: Package, guard: frozenset[str] = frozenset()
    ) -> None:
        if package.name in guard:
            return
        backlog[package.name] = package
        package_deps = runtime.get_dependencies(package, kind='packages')
        for package_dep in package_deps:
            if not package_dep.impl.auto_generated:
                continue
            if not all(
                dependent.name in backlog
                for dependent
                in runtime.get_dependents(package_dep, kind='packages')
            ):
                continue
            walk_dependencies(package_dep, guard | {package.name})

    # Collect packages
    for name in args:
        # Handle caller module reference
        if name == CALLER_REF:
            name = self.caller_package.name

        # Check if provided package name is duplicated
        if name in config:
            raise PlateformeError(
                f"Duplicated package {name!r} found within provided "
                f"package names.",
                code='plateforme-invalid-package',
            )

        # Check if package is installed
        if name not in self.packages:
            if not raise_errors:
                continue
            raise PlateformeError(
                f"Package {name!r} does not exist within application "
                f"{str(self)!r}.",
                code='plateforme-invalid-package',
            )
        else:
            config.add(name)

        package = self.packages[name].package
        walk_dependencies(package)

    # Remove packages
    for name, package in list(backlog.items()):
        # Check if package has dependents not planned to be removed
        if check_deps := [
            dependent.name
            for dependent
            in runtime.get_dependents(package, kind='packages')
            if dependent.name not in backlog
        ]:
            if not raise_errors:
                backlog.pop(name)
                continue
            raise PlateformeError(
                f"Cannot remove package {name!r} from the application "
                f"{str(self)!r}. The package has dependents not planned "
                f"to be removed: {', '.join(check_deps)}.",
                code='plateforme-invalid-package',
            )

        package._remove_impl(self)

        logger.info(f"({self}) pkg:{name} -> removed")

setup_api

setup_api(*, reset: bool = False) -> None

Setup the application API manager.

Parameters:

Name Type Description Default
reset bool

Whether to reset the application API manager, i.e. clear all existing routes from current router. Defaults to False.

False
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def setup_api(self, *, reset: bool = False) -> None:
    """Setup the application API manager.

    Args:
        reset: Whether to reset the application API manager, i.e. clear all
            existing routes from current router. Defaults to ``False``.
    """
    # Resolve base configuration
    debug = self.settings.debug
    title = self.settings.title
    summary = self.settings.summary
    description = self.settings.description
    version = self.settings.version
    terms_of_service = self.settings.terms_of_service
    contact = self.settings.contact
    license_info = self.settings.license
    deprecated = self.settings.deprecated

    # Documentation is available only at the namespace level
    self.settings.api.openapi_url = None

    # Update API middleware
    self.settings.api.middleware = [
        Middleware(BulkMiddleware),
        *(self.settings.api.middleware or []),
    ]

    # Build configuration dictionary
    config: dict[str, Any] = dict(
        debug=debug,
        title=title,
        summary=summary,
        description=description,
        version=version,
        terms_of_service=terms_of_service,
        contact=contact.model_dump() if contact else None,
        license_info=license_info.model_dump() if license_info else None,
        deprecated=deprecated,
        **self.settings.api.model_dump(),
    )

    # Setup manager and include current router if no reset
    router = None
    if not reset:
        router = self.api.router
    object.__setattr__(self, 'api', APIManager(**config))
    if not reset:
        assert router is not None
        self.api.include_router(router)

    # Add exception handlers
    for error, handler in EXCEPTION_HANDLERS.items():
        self.api.add_exception_handler(error, handler)
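
For instance, the manager can be rebuilt after settings changes, assuming an application instance app:

# reset=True discards the current router and its routes; reset=False carries
# the existing router over into the new APIManager instance.
app.setup_api(reset=True)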

mount_namespaces

mount_namespaces(
    *names: str,
    force: bool = False,
    raise_errors: bool = True,
    propagate: bool = False,
    **overrides: Unpack[APIBaseRouterConfigDict],
) -> None

Mount given namespaces into the application API manager.

Parameters:

Name Type Description Default
*names str

A list of namespace names to mount into the application namespace API manager.

()
force bool

Whether to force mount the namespaces even if they are already mounted. This will not raise an error if a namespace is already mounted; instead, the existing namespace router is replaced with a new one. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently if a namespace is already mounted within the API manager. Defaults to True.

True
propagate bool

Whether to propagate the mount operation to the namespaces. Defaults to False.

False
**overrides Unpack[APIBaseRouterConfigDict]

Additional router configuration keyword arguments to override the default router configuration when including the namespace package routers.

{}
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def mount_namespaces(
    self,
    *names: str,
    force: bool = False,
    raise_errors: bool = True,
    propagate: bool = False,
    **overrides: Unpack[APIBaseRouterConfigDict],
) -> None:
    """Mount given namespaces into the application API manager.

    Args:
        *names: A list of namespace names to mount into the application
            namespace API manager.
        force: Whether to force mount the namespaces even if they are
            already mounted. This will not raise an error if a namespace is
            already mounted; instead, the existing namespace router is
            replaced with a new one. Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently if a
            namespace is already mounted within the API manager.
            Defaults to ``True``.
        propagate: Whether to propagate the mount operation to the
            namespaces. Defaults to ``False``.
        **overrides: Additional router configuration keyword arguments to
            override the default router configuration when including the
            namespace package routers.
    """
    namespaces = self._validate_namespace_names(
        *names, raise_errors=raise_errors
    )

    # Mount namespaces
    for namespace in namespaces:
        for route in self.api.routes:
            if namespace.slug != getattr(route, 'name', None):
                continue
            if not force:
                if not raise_errors:
                    continue
                raise PlateformeError(
                    f"Namespace {namespace.name!r} is already mounted "
                    f"within application {str(self)!r}.",
                    code='plateforme-invalid-namespace',
                )
            self.api.routes.remove(route)

        if propagate:
            namespace.mount(
                force=force,
                raise_errors=raise_errors,
                **overrides
            )
        self.api.mount(namespace.path, namespace.api, namespace.slug)
        self._sort_api_routes()

        logger.info(f"({self}) ns:{namespace} -> mounted")

unmount_namespaces

unmount_namespaces(
    *names: str,
    raise_errors: bool = True,
    propagate: bool = False,
) -> None

Unmount given namespaces from the application API manager.

Parameters:

Name Type Description Default
*names str

A list of namespace names to unmount from the application namespace API manager.

()
raise_errors bool

Whether to raise errors or fail silently if a namespace is not mounted within the API manager. Defaults to True.

True
propagate bool

Whether to propagate the unmount operation to the namespaces. Defaults to False.

False
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def unmount_namespaces(
    self,
    *names: str,
    raise_errors: bool = True,
    propagate: bool = False,
) -> None:
    """Unmount given namespaces from the application API manager.

    Args:
        *names: A list of namespace names to unmount from the application
            namespace API manager.
        raise_errors: Whether to raise errors or fail silently if a
            namespace is not mounted within the API manager.
            Defaults to ``True``.
        propagate: Whether to propagate the unmount operation to the
            namespaces. Defaults to ``False``.
    """
    namespaces = self._validate_namespace_names(
        *names, raise_errors=raise_errors
    )

    # Unmount namespaces
    for namespace in namespaces:
        has_routes = False
        for route in self.api.routes:
            if namespace.slug != getattr(route, 'name', None):
                continue
            self.api.routes.remove(route)
            has_routes = True
        if not has_routes and raise_errors:
            raise PlateformeError(
                f"Namespace {namespace.name!r} is not mounted within "
                f"application {str(self)!r}.",
                code='plateforme-invalid-namespace'
            )
        if propagate:
            namespace.unmount(raise_errors=raise_errors)

        logger.info(f"({self}) ns:{namespace} -> unmounted")

mount_packages

mount_packages(
    *names: str,
    force: bool = False,
    raise_errors: bool = True,
    **overrides: Unpack[APIBaseRouterConfigDict],
) -> None

Mount given packages into the application API manager.

Parameters:

Name Type Description Default
*names str

A list of package module names to mount into the application API manager.

()
force bool

Whether to force mount the packages even if they are already mounted. This will not raise an error if a package is already mounted; instead, the existing package router is replaced with a new one. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently if a package is already mounted within the API manager. Defaults to True.

True
**overrides Unpack[APIBaseRouterConfigDict]

Additional router configuration keyword arguments to override the default router configuration when including the package routers.

{}
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def mount_packages(
    self,
    *names: str,
    force: bool = False,
    raise_errors: bool = True,
    **overrides: Unpack[APIBaseRouterConfigDict],
) -> None:
    """Mount given packages into the application API manager.

    Args:
        *names: A list of package module names to mount into the
            application API manager.
        force: Whether to force mount the packages even if they are already
            mounted. This will not raise an error if a package is already
            mounted; instead, the existing package router is replaced with
            a new one. Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently if a package
            is already mounted within the API manager.
            Defaults to ``True``.
        **overrides: Additional router configuration keyword arguments to
            override the default router configuration when including the
            package routers.
    """
    packages = self._validate_package_names(
        *names, raise_errors=raise_errors
    )

    # Collect namespaces
    namespaces: dict[NamespaceImpl, set[PackageImpl]] = {}
    for package in packages:
        namespaces[package.namespace] = \
            namespaces.get(package.namespace, set()) | {package}

    # Mount packages
    for namespace, namespace_packages in namespaces.items():
        for route in self.api.routes:
            if namespace.slug != getattr(route, 'name', None):
                continue
            if not force:
                if not raise_errors:
                    continue
                raise PlateformeError(
                    f"Namespace {namespace.name!r} is not mounted within "
                    f"application {str(self)!r}.",
                    code='plateforme-invalid-namespace',
                )
            self.api.routes.remove(route)
        namespace.mount(
            *[package.name for package in namespace_packages],
            force=force,
            raise_errors=raise_errors,
            **overrides,
        )
        self.api.mount(namespace.path, namespace.api, namespace.name)
        self._sort_api_routes()

unmount_packages

unmount_packages(
    *names: str, raise_errors: bool = True
) -> None

Unmount given packages from the application API manager.

Parameters:

Name Type Description Default
*names str

A list of package module names to unmount from the application API manager.

()
raise_errors bool

Whether to raise errors or fail silently if a package is not mounted within the API manager. Defaults to True.

True
Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
@emit()
def unmount_packages(
    self,
    *names: str,
    raise_errors: bool = True,
) -> None:
    """Unmount given packages from the application API manager.

    Args:
        *names: A list of package module names to unmount from the
            application API manager.
        raise_errors: Whether to raise errors or fail silently if a package
            is not mounted within the API manager.
            Defaults to ``True``.
    """
    packages = self._validate_package_names(
        *names, raise_errors=raise_errors
    )

    # Collect namespaces
    namespaces: dict[NamespaceImpl, set[PackageImpl]] = {}
    for package in packages:
        namespaces[package.namespace] = \
            namespaces.get(package.namespace, set()) | {package}

    # Unmount packages
    for namespace, namespace_packages in namespaces.items():
        has_routes = False
        for route in self.api.routes:
            if namespace.slug != getattr(route, 'name', None):
                continue
            self.api.routes.remove(route)
            has_routes = True
        if not has_routes and raise_errors:
            raise PlateformeError(
                f"Namespace {namespace.name!r} is not mounted within a"
                f"application {str(self)!r}.",
                code='plateforme-invalid-namespace'
            )
        namespace.unmount(
            *[package.name for package in namespace_packages],
            raise_errors=raise_errors,
        )

get_dependencies

get_dependencies(
    *,
    kind: Literal["links"],
    status: tuple[Lifecycle, ...] | None = None,
    max_depth: int | None = 1,
) -> set[ResourceFieldInfo]
get_dependencies(
    *,
    kind: Literal["resources"],
    status: tuple[ResolvedState, ...],
    max_depth: int | None = 1,
) -> set[ResourceType]
get_dependencies(
    *,
    kind: Literal["resources"],
    status: tuple[Literal[UNKNOWN], ...],
    max_depth: int | None = 1,
) -> set[str]
get_dependencies(
    *,
    kind: Literal["resources"],
    status: tuple[Lifecycle, ...] | None = None,
    max_depth: int | None = 1,
) -> set[ResourceType | str]
get_dependencies(
    *,
    kind: Literal["packages"],
    status: tuple[Lifecycle, ...] | None = None,
    max_depth: int | None = 1,
) -> set[Package]
get_dependencies(
    *,
    kind: Literal["links", "resources", "packages"],
    status: tuple[Lifecycle, ...] | None = None,
    max_depth: int | None = 1,
) -> (
    set[ResourceFieldInfo]
    | set[ResourceType | str]
    | set[ResourceType]
    | set[str]
    | set[Package]
)

Collect the dependencies of the application.

This method returns the dependencies of the application based on the specified kind. It filters the runtime dependencies class dictionary to return only the dependencies of this application.

Parameters:

Name Type Description Default
kind Literal['links', 'resources', 'packages']

The kind of dependencies to retrieve. It can be one of the following values:
- 'links': Returns the linked field dependencies the resources from this application rely on.
- 'resources': Returns the resource dependencies the resources from this application rely on.
- 'packages': Returns the package dependencies the resources from this application rely on.

required
status tuple[Lifecycle, ...] | None

The tuple of dependencies lifecycle status to filter. Note that only the 'links' and 'resources' kinds support having this argument to include Lifecycle.UNKNOWN, where for the latter, it returns a set of the fully qualified names of the unresolved resources. When set to None, it returns all the dependencies regardless of their status, except for the kind 'packages' where only resolved dependencies are returned. Defaults to None.

None
max_depth int | None

The maximum depth of dependencies to retrieve. If set to None, it retrieves all dependencies no matter the depth. Defaults to 1, meaning that it retrieves only the direct dependencies.

1

Returns:

Type Description
set[ResourceFieldInfo] | set[ResourceType | str] | set[ResourceType] | set[str] | set[Package]

The specified kind dependencies of the application.

Raises:

Type Description
ValueError

If the lifecycle status is invalid, i.e. when the filter includes Lifecycle.UNKNOWN for package dependencies.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def get_dependencies(
    self,
    *,
    kind: Literal['links', 'resources', 'packages'],
    status: tuple[Lifecycle, ...] | None = None,
    max_depth: int | None = 1,
) -> (
    set[ResourceFieldInfo]
    | set[ResourceType | str] | set[ResourceType] | set[str]
    | set[Package]
):
    """Collect the dependencies of the application.

    This method returns the dependencies of the application based on the
    specified kind. It filters the runtime `dependencies` class dictionary
    to return only the dependencies of this application.

    Args:
        kind: The kind of dependencies to retrieve. It can be one of the
            following values
            - ``'links'``: Returns the linked field dependencies the
                resources from this application rely on.
            - ``'resources'``: Returns the resource dependencies the
                resources from this application rely on.
            - ``'packages'``: Returns the package dependencies the
                resources from this application rely on.
        status: The tuple of dependencies lifecycle status to filter. Note
            that only the ``'links'`` and ``'resources'`` kinds support
            having this argument to include `Lifecycle.UNKNOWN`, where for
            the latter, it returns a set of the fully qualified names of
            the unresolved resources. When set to ``None``, it returns all
            the dependencies regardless of their status, except for the
            kind ``'packages'`` where only resolved dependencies are
            returned. Defaults to ``None``.
        max_depth: The maximum depth of dependencies to retrieve. If set to
            ``None``, it retrieves all dependencies no matter the depth.
            Defaults to ``1``, meaning that it retrieves only the direct
            dependencies.

    Returns:
        The specified kind dependencies of the application.

    Raises:
        ValueError: If the lifecycle status is invalid, i.e. when the
            filter includes `Lifecycle.UNKNOWN` for package dependencies.
    """
    return runtime.get_dependencies(
        [impl.package for impl in self.packages.values()],
        kind=kind,
        status=status,
        max_depth=max_depth,
    )
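
For example, assuming an application instance app:

# Direct package dependencies of the application's resources.
package_deps = app.get_dependencies(kind='packages')
# Resource dependencies at any depth, regardless of lifecycle status.
resource_deps = app.get_dependencies(kind='resources', max_depth=None)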

get_dependents

get_dependents(
    *,
    kind: Literal["links"],
    status: tuple[ResolvedState, ...] | None = None,
    max_depth: int | None = 1,
) -> set[ResourceFieldInfo]
get_dependents(
    *,
    kind: Literal["resources"],
    status: tuple[ResolvedState, ...] | None = None,
    max_depth: int | None = 1,
) -> set[ResourceType]
get_dependents(
    *,
    kind: Literal["packages"],
    status: tuple[ResolvedState, ...] | None = None,
    max_depth: int | None = 1,
) -> set[Package]
get_dependents(
    *,
    kind: Literal["links", "resources", "packages"],
    status: tuple[ResolvedState, ...] | None = None,
    max_depth: int | None = 1,
) -> (
    set[ResourceFieldInfo]
    | set[ResourceType]
    | set[Package]
)

Collect the dependents of the application.

This method returns the dependents of the application based on the specified kind. It filters the runtime dependents class dictionary to return only the dependents of this application.

Parameters:

Name Type Description Default
kind Literal['links', 'resources', 'packages']

The kind of dependents to retrieve. It can be one of the following values:
- 'links': Returns the linked field dependents that rely on the resources from this application.
- 'resources': Returns the resource dependents that rely on the resources from this application.
- 'packages': Returns the package dependents that rely on the resources from this application.

required
status tuple[ResolvedState, ...] | None

The tuple of dependents lifecycle status to filter. Note that the Lifecycle.UNKNOWN status is not supported for dependents as they are always resolved when evaluated. When set to None, it returns all the dependents regardless of their lifecycle status. Defaults to None.

None
max_depth int | None

The maximum depth of dependents to retrieve. If set to None, it retrieves all dependents no matter the depth. Defaults to 1, meaning that it retrieves only the direct dependents.

1

Returns:

Type Description
set[ResourceFieldInfo] | set[ResourceType] | set[Package]

The specified kind dependents of the application.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def get_dependents(
    self,
    *,
    kind: Literal['links', 'resources', 'packages'],
    status: tuple[ResolvedState, ...] | None = None,
    max_depth: int | None = 1,
) -> (
    set[ResourceFieldInfo]
    | set[ResourceType]
    | set[Package]
):
    """Collect the dependents of the application.

    This method returns the dependents of the application based on the
    specified kind. It filters the runtime `dependents` class dictionary
    to return only the dependents of this application.

    Args:
        kind: The kind of dependents to retrieve. It can be one of the
            following values
            - ``'links'``: Returns the linked field dependents that rely on
                the resources from this application.
            - ``'resources'``: Returns the resource dependents that rely on
                the resources from this application.
            - ``'packages'``: Returns the package dependents that rely on
                the resources from this application.
        status: The tuple of dependents lifecycle status to filter. Note
            that the `Lifecycle.UNKNOWN` status is not supported for
            dependents as they are always resolved when evaluated. When set
            to ``None``, it returns all the dependents regardless of their
            lifecycle status. Defaults to ``None``.
        max_depth: The maximum depth of dependents to retrieve. If set to
            ``None``, it retrieves all dependents no matter the depth.
            Defaults to ``1``, meaning that it retrieves only the direct
            dependents.

    Returns:
        The specified kind dependents of the application.
    """
    return runtime.get_dependents(
        [impl.package for impl in self.packages.values()],
        kind=kind,
        status=status,
        max_depth=max_depth,
    )

get_resources

get_resources() -> set[ResourceType]

Collect the resources of the application.

A method that filters the runtime resources class dictionary to return only the resources of this application.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/main.py
def get_resources(self) -> set[ResourceType]:
    """Collect the resources of the application.

    A method that filters the runtime `resources` class dictionary to
    return only the resources of this application.
    """
    return runtime.get_resources(
        [impl.package for impl in self.packages.values()]
    )
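
As a brief sketch combining the collection helpers above, assuming an application instance app:

# Resources owned by the application's packages, and the direct package
# dependents that rely on them.
owned = app.get_resources()
dependents = app.get_dependents(kind='packages')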

plateforme.core.projects

This module provides utilities for managing project configurations within the Plateforme framework.

PROJECT_FILES module-attribute

PROJECT_FILES = ('config.toml', 'pyproject.toml')

A list of valid project configuration file names for apps and packages.

ProjectAppInfo

ProjectAppInfo(**data: Any)

Bases: BaseModel

Project application information.

Initialize a model instance.

It initializes a model instance by parsing and validating input data from the data keyword arguments.

Parameters:

Name Type Description Default
**data Any

The input data to initialize the model instance.

{}

Raises:

Type Description
ValidationError

If the object could not be validated.

Note

The argument self is explicitly positional-only to allow self as a field name and data keyword argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def __init__(self, /, **data: Any) -> None:
    """Initialize a model instance.

    It initializes a model instance by parsing and validating input data
    from the `data` keyword arguments.

    Args:
        **data: The input data to initialize the model instance.

    Raises:
        ValidationError: If the object could not be validated.

    Note:
        The argument ``self`` is explicitly positional-only to allow
        ``self`` as a field name and data keyword argument.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    self.__pydantic_validator__.validate_python(data, self_instance=self)

model_extra property

model_extra: dict[str, Any] | None

Get extra fields set during validation.

Returns:

Type Description
dict[str, Any] | None

A dictionary of extra fields, or None if config.extra is not set to "allow".

model_fields_set property

model_fields_set: set[str]

Returns the set of fields that have been explicitly set on this model instance.

Returns:

Type Description
set[str]

A set of strings representing the fields that have been set, i.e. that were not filled from defaults.

model_construct classmethod

model_construct(
    _fields_set: set[str] | None = None, **data: Any
) -> Model

Creates a new instance of the model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. It behaves as if model_config.extra = 'allow' was set since it adds all passed values.

Parameters:

Name Type Description Default
_fields_set set[str] | None

The set of field names accepted by the model instance.

None
**data Any

Trusted or pre-validated input data to initialize the model. It is used to set the __dict__ attribute of the model.

{}

Returns:

Type Description
Model

A new instance of the model class with validated data.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_construct(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    _fields_set: set[str] | None = None,
    **data: Any,
) -> Model:
    """Creates a new instance of the model class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__`
    from trusted or pre-validated data. Default values are respected, but
    no other validation is performed. It behaves as if
    `model_config.extra = 'allow'` was set since it adds all passed values.

    Args:
        _fields_set: The set of field names accepted by the model instance.
        **data: Trusted or pre-validated input data to initialize the
            model. It is used to set the `__dict__` attribute of the model.

    Returns:
        A new instance of the model class with validated data.
    """
    model = super().model_construct(_fields_set, **data)

    # Remove default initialization of instrumented resource fields, as
    # they are not needed when constructing a resource instance directly,
    # i.e. defaults are already set and stored in the database.
    if cls.__pydantic_owner__ == 'resource':
        resource = cls.__pydantic_resource__
        for name in getattr(resource, 'resource_attributes'):
            if _fields_set and name in _fields_set:
                continue
            model.__dict__.pop(name, None)

    return model
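
For example, trusted data can be wrapped without validation; the User model below is hypothetical and assumes BaseModel is importable from the plateforme package:

from plateforme import BaseModel  # assumed top-level export

class User(BaseModel):
    name: str
    active: bool = True

# No validation is performed; the default for 'active' is still applied
user = User.model_construct(name='Alice')
assert user.active is True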

model_copy

model_copy(
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Parameters:

Name Type Description Default
update dict[str, Any] | None

Values to add/modify within the new model. Note that if assignment validation is not set to True, the integrity of the data is not validated when creating the new model. Data should be trusted or pre-validated in this case.

None
deep bool

Set to True to make a deep copy of the model.

False

Returns:

Type Description
Model

A new copy of the model instance with the updated values.

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_copy(  # type: ignore[override, unused-ignore]
    self: Model,
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model:
    """Returns a copy of the model.

    Args:
        update: Values to add/modify within the new model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when creating the new model. Data
            should be trusted or pre-validated in this case.
        deep: Set to ``True`` to make a deep copy of the model.

    Returns:
        A new copy of the model instance with the updated values.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    copied = self.__deepcopy__() if deep else self.__copy__()
    if update:
        copied.model_update(update, from_attributes=False)
    return copied
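
Reusing the hypothetical User model from the model_construct example, a copy with overridden values might look like this; note that update values are not validated unless assignment validation is enabled:

user = User(name='Alice')
copied = user.model_copy(update={'name': 'Bob'})
assert copied.name == 'Bob' and user.name == 'Alice'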

model_dump

model_dump(
    *,
    mode: Literal["json", "python", "raw"] | str = "python",
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]

Generate a dictionary representation of the model.

It is used to dump the model instance to a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default
mode Literal['json', 'python', 'raw'] | str

The mode in which to_python should run:
- If mode is json, the output will only contain JSON serializable types.
- If mode is python, the output may contain non JSON serializable Python objects.
- If mode is raw, the output will contain raw values.
Defaults to python.

'python'
include IncEx | None

A list of fields to include in the output. Defaults to None.

None
exclude IncEx | None

A list of fields to exclude from the output. Defaults to None.

None
by_alias bool

Whether to use the field's alias in the dictionary key if defined. Defaults to False.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set. Defaults to False.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value. Defaults to False.

False
exclude_none bool

Whether to exclude fields that have a value of None. Defaults to False.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T]. Defaults to False.

False
warnings bool

Whether to log warnings when invalid fields are encountered. Defaults to True.

True

Returns:

Type Description
dict[str, Any]

A dictionary representation of the model.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_dump(  # type: ignore[override, unused-ignore]
    self,
    *,
    mode: Literal['json', 'python', 'raw'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]:
    """Generate a dictionary representation of the model.

    It is used to dump the model instance to a dictionary representation of
    the model, optionally specifying which fields to include or exclude.

    Args:
        mode: The mode in which `to_python` should run:
            - If mode is ``json``, the output will only contain JSON
                serializable types.
            - If mode is ``python``, the output may contain non JSON
                serializable Python objects.
            - If mode is ``raw``, the output will contain raw values.
            Defaults to ``python``.
        include: A list of fields to include in the output.
            Defaults to ``None``.
        exclude: A list of fields to exclude from the output.
            Defaults to ``None``.
        by_alias: Whether to use the field's alias in the dictionary key if
            defined. Defaults to ``False``.
        exclude_unset: Whether to exclude fields that have not been
            explicitly set. Defaults to ``False``.
        exclude_defaults: Whether to exclude fields that are set to their
            default value. Defaults to ``False``.
        exclude_none: Whether to exclude fields that have a value of
            ``None``. Defaults to ``False``.
        round_trip: If ``True``, dumped values should be valid as input for
            non-idempotent types such as `Json[T]`. Defaults to ``False``.
        warnings: Whether to log warnings when invalid fields are
            encountered. Defaults to ``True``.

    Returns:
        A dictionary representation of the model.
    """
    if mode != 'raw':
        return self.__pydantic_serializer__.to_python(  # type: ignore
            self,
            mode=mode,
            by_alias=by_alias,
            include=include,  # type: ignore
            exclude=exclude,  # type: ignore
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
        )

    # Handle raw mode
    result: dict[str, Any] = {}
    for field_name, field_info in self.model_fields.items():
        if not hasattr(self, field_name):
            continue
        value = getattr(self, field_name)
        # Skip excluded fields
        if include is not None and field_name not in include:
            continue
        if exclude is not None and field_name in exclude:
            continue
        if exclude_unset and field_name not in self.model_fields_set:
            continue
        if exclude_defaults and value == field_info.default:
            continue
        if exclude_none and value is None:
            continue
        # Add field value
        if by_alias and field_info.alias:
            result[field_info.alias] = value
        else:
            result[field_name] = value
    return result
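
A minimal sketch with the hypothetical User model; in raw mode the stored values are returned as-is instead of being serialized:

user = User(name='Alice')
user.model_dump()                                 # expected: {'name': 'Alice', 'active': True}
user.model_dump(mode='raw', exclude={'active'})   # expected: {'name': 'Alice'}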

model_dump_json

model_dump_json(
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str

Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default
indent int | None

Indentation to use in the JSON output. If None is passed, the output will be compact.

None
include IncEx

Field(s) to include in the JSON output.

None
exclude IncEx

Field(s) to exclude from the JSON output.

None
by_alias bool

Whether to serialize using field aliases.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value.

False
exclude_none bool

Whether to exclude fields that have a value of None.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T].

False
warnings bool

Whether to log warnings when invalid fields are encountered.

True

Returns:

Type Description
str

A JSON string representation of the model.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str:
    """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

    Generates a JSON representation of the model using Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: Whether to log warnings when invalid fields are encountered.

    Returns:
        A JSON string representation of the model.
    """
    return self.__pydantic_serializer__.to_json(
        self,
        indent=indent,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
    ).decode()
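
For instance, with the hypothetical User model (exact formatting may vary slightly):

user = User(name='Alice', active=False)
print(user.model_dump_json(indent=2))
# expected output:
# {
#   "name": "Alice",
#   "active": false
# }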

model_json_schema classmethod

model_json_schema(
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[
        GenerateJsonSchema
    ] = GenerateJsonSchema,
    mode: JsonSchemaMode = "validation",
    source: JsonSchemaSource = "model",
) -> dict[str, Any]

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default
by_alias bool

Whether to use field aliases when generating the schema, i.e. if True, fields will be serialized according to their alias, otherwise according to their attribute name. Defaults to True.

True
ref_template str

The template format string used when generating reference names. Defaults to DEFAULT_REF_TEMPLATE.

DEFAULT_REF_TEMPLATE
schema_generator type[GenerateJsonSchema]

The class to use for generating the JSON Schema.

GenerateJsonSchema
mode JsonSchemaMode

The mode to use for generating the JSON Schema. It can be either validation or serialization where respectively the schema is generated for validating data or serializing data. Defaults to validation.

'validation'
source JsonSchemaSource

The source type to use for generating the resources JSON schema. It can be either key, model, or both where the latter accepts, when applicable, integer and string values for key identifiers in addition to the standard model schema generation. Defaults to model.

'model'

Returns:

Type Description
dict[str, Any]

The generated JSON schema of the model class.

Note

The schema generator class can be overridden to customize the logic used to generate the JSON schema. This can be done by subclassing the GenerateJsonSchema class and passing the subclass as the schema_generator argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_json_schema(  # type: ignore[override, unused-ignore]
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
    source: JsonSchemaSource = 'model',
) -> dict[str, Any]:
    """Generates a JSON schema for a model class.

    Args:
        by_alias: Whether to use field aliases when generating the schema,
            i.e. if ``True``, fields will be serialized according to their
            alias, otherwise according to their attribute name.
            Defaults to ``True``.
        ref_template: The template format string used when generating
            reference names. Defaults to ``DEFAULT_REF_TEMPLATE``.
        schema_generator: The class to use for generating the JSON Schema.
        mode: The mode to use for generating the JSON Schema. It can be
            either ``validation`` or ``serialization`` where respectively
            the schema is generated for validating data or serializing
            data. Defaults to ``validation``.
        source: The source type to use for generating the resources JSON
            schema. It can be either ``key`` , ``model``, or ``both`` where
            the latter accepts, when applicable, integer and string values
            for key identifiers in addition to the standard model schema
            generation. Defaults to ``model``.

    Returns:
        The generated JSON schema of the model class.

    Note:
        The schema generator class can be overridden to customize the
        logic used to generate the JSON schema. This can be done by
        subclassing the `GenerateJsonSchema` class and passing the subclass
        as the `schema_generator` argument.
    """
    schema_generator_instance = schema_generator(
        by_alias=by_alias, ref_template=ref_template
    )
    if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer):
        cls.__pydantic_validator__.rebuild()
    return schema_generator_instance.generate(
        cls.__pydantic_core_schema__, mode=mode, source=source
    )
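
As a quick illustration with the hypothetical User model, the validation schema for a plain model is an object schema over its fields:

schema = User.model_json_schema(mode='validation')
assert schema['type'] == 'object'
assert set(schema['properties']) == {'name', 'active'}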

model_parametrized_name classmethod

model_parametrized_name(
    params: tuple[type[Any], ...],
) -> str

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default
params tuple[type[Any], ...]

Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.

required

Returns:

Type Description
str

String representing the new class where params are passed to cls as type variables.

Raises:

Type Description
TypeError

Raised when trying to generate concrete names for non-generic models.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@classmethod
def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
    """Compute the class name for parametrizations of generic classes.

    This method can be overridden to achieve a custom naming scheme for generic BaseModels.

    Args:
        params: Tuple of types of the class. Given a generic class
            `Model` with 2 type variables and a concrete model `Model[str, int]`,
            the value `(str, int)` would be passed to `params`.

    Returns:
        String representing the new class where `params` are passed to `cls` as type variables.

    Raises:
        TypeError: Raised when trying to generate concrete names for non-generic models.
    """
    if not issubclass(cls, typing.Generic):
        raise TypeError('Concrete names should only be generated for generic models.')

    # Any strings received should represent forward references, so we handle them specially below.
    # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
    # we may be able to remove this special case.
    param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
    params_component = ', '.join(param_names)
    return f'{cls.__name__}[{params_component}]'

model_post_init

model_post_init(__context: Any) -> None

Post-initialization method for the model class.

Override this method to perform additional initialization after the __init__ and model_construct methods have been called. This is useful in scenarios where it is necessary to perform additional initialization steps after the model has been fully initialized.

Parameters:

Name Type Description Default
__context Any

The context object passed to the model instance.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_post_init(self, __context: Any) -> None:
    """Post-initialization method for the model class.

    Override this method to perform additional initialization after the
    `__init__` and `model_construct` methods have been called. This is
    useful in scenarios where it is necessary to perform additional
    initialization steps after the model has been fully initialized.

    Args:
        __context: The context object passed to the model instance.
    """
    ...
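
A hypothetical override might log each new instance after it has been fully initialized:

from typing import Any

class AuditedUser(User):
    def model_post_init(self, __context: Any) -> None:
        # Called after __init__ or model_construct has populated the instance
        print(f"created {self.name!r}")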

model_rebuild classmethod

model_rebuild(
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default
force bool

Whether to force the rebuilding of the model schema. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
_parent_namespace_depth int

The depth level of the parent namespace. Defaults to 2.

2
_types_namespace dict[str, Any] | None

The types namespace. Defaults to None.

None

Raises:

Type Description
PlateformeError

If an error occurred while rebuilding the model adapter and raise_errors is set to True.

PydanticUndefinedAnnotation

If PydanticUndefinedAnnotation occurs in __get_pydantic_core_schema__ and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_rebuild(  # type: ignore[override, unused-ignore]
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a `ForwardRef`
    which could not be resolved during the initial attempt to build the
    schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        _parent_namespace_depth: The depth level of the parent namespace.
            Defaults to 2.
        _types_namespace: The types namespace. Defaults to ``None``.

    Raises:
        PlateformeError: If an error occurred while rebuilding the model
            adapter and `raise_errors` is set to ``True``.
        PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation`
            occurs in `__get_pydantic_core_schema__` and `raise_errors` is
            set to ``True``.

    Returns:
        Returns ``None`` if the schema is already "complete" and rebuilding
        was not required. If rebuilding was required, returns ``True`` if
        rebuilding was successful, otherwise ``False`` if an error
        occurred and `raise_errors` is set to ``False``.
    """
    build_status: bool | None = None

    # Rebuild model
    build_status = super().model_rebuild(
        force=build_status or force,
        raise_errors=raise_errors,
        _parent_namespace_depth=_parent_namespace_depth,
        _types_namespace=_types_namespace,
    )

    # Rebuild model adapter
    if build_status:
        try:
            adapter = TypeAdapterList(cls)
            setattr(cls, '__pydantic_adapter__', adapter)
        except Exception as error:
            if not raise_errors:
                return False
            raise PlateformeError(
                f"Failed to rebuild model adapter for {cls.__name__!r}.",
                code='model-build-failed',
            )

    if build_status is not False:
        cls.model_config.pop('defer_build')

    return build_status
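
A typical scenario is a forward reference to a class defined later in the module; the models below are hypothetical:

class Tree(BaseModel):
    root: 'Leaf | None' = None

class Leaf(BaseModel):
    label: str

# 'Leaf' did not exist when Tree was first built, so rebuild its schema now
Tree.model_rebuild()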

model_validate classmethod

model_validate(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given object against the model.

Parameters:

Name Type Description Default
obj Any

The object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given object against the model.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
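
For example, with the hypothetical User model:

data = {'name': 'Alice', 'active': False}
user = User.model_validate(data)
assert user.active is False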

model_validate_json classmethod

model_validate_json(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given JSON data against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given JSON data against the model.

    Args:
        json_data: The JSON data to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )
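
Similarly, JSON payloads can be validated directly:

user = User.model_validate_json('{"name": "Alice"}')
assert user.name == 'Alice'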

model_validate_strings classmethod

model_validate_strings(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given string object against the model.

Parameters:

Name Type Description Default
obj Any

The string object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given string object against the model.

    Args:
        obj: The string object to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

copy

copy(
    *,
    include: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    exclude: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    update: Dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

Name Type Description Default
include AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to include in the copied model.

None
exclude AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to exclude in the copied model.

None
update Dict[str, Any] | None

Optional dictionary of field-value pairs to override field values in the copied model.

None
deep bool

If True, the values of fields that are Pydantic models will be deep-copied.

False

Returns:

Type Description
Model

A copy of the model with included, excluded and updated fields as specified.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@typing_extensions.deprecated(
    'The `copy` method is deprecated; use `model_copy` instead. '
    'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
    category=None,
)
def copy(
    self: Model,
    *,
    include: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
    update: typing.Dict[str, Any] | None = None,  # noqa UP006
    deep: bool = False,
) -> Model:  # pragma: no cover
    """Returns a copy of the model.

    !!! warning "Deprecated"
        This method is now deprecated; use `model_copy` instead.

    If you need `include` or `exclude`, use:

    ```py
    data = self.model_dump(include=include, exclude=exclude, round_trip=True)
    data = {**data, **(update or {})}
    copied = self.model_validate(data)
    ```

    Args:
        include: Optional set or mapping specifying which fields to include in the copied model.
        exclude: Optional set or mapping specifying which fields to exclude in the copied model.
        update: Optional dictionary of field-value pairs to override field values in the copied model.
        deep: If True, the values of fields that are Pydantic models will be deep-copied.

    Returns:
        A copy of the model with included, excluded and updated fields as specified.
    """
    warnings.warn(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=PydanticDeprecatedSince20,
    )
    from .deprecated import copy_internals

    values = dict(
        copy_internals._iter(
            self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
        ),
        **(update or {}),
    )
    if self.__pydantic_private__ is None:
        private = None
    else:
        private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

    if self.__pydantic_extra__ is None:
        extra: dict[str, Any] | None = None
    else:
        extra = self.__pydantic_extra__.copy()
        for k in list(self.__pydantic_extra__):
            if k not in values:  # k was in the exclude
                extra.pop(k)
        for k in list(values):
            if k in self.__pydantic_extra__:  # k must have come from extra
                extra[k] = values.pop(k)

    # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
    if update:
        fields_set = self.__pydantic_fields_set__ | update.keys()
    else:
        fields_set = set(self.__pydantic_fields_set__)

    # removing excluded fields from `__pydantic_fields_set__`
    if exclude:
        fields_set -= set(exclude)

    return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

model_adapter

model_adapter() -> TypeAdapterList[BaseModel]

Get the model type adapter.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classproperty
def model_adapter(cls) -> TypeAdapterList['BaseModel']:
    """Get the model type adapter."""
    if not hasattr(cls, '__pydantic_adapter__'):
        raise AttributeError(
            "The model type adapter is not defined. This may be due to "
            "the model not being fully built or an error occurred during "
            "model construction."
        )
    return cls.__pydantic_adapter__

model_revalidate

model_revalidate(
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None

Revalidate the model instance.

It revalidates the model instance in place, enforcing the types strictly if specified. If the model instance has already been validated, it will not be revalidated unless the force argument is set to True.

Parameters:

Name Type Description Default
force bool

Whether to force the revalidation of the model instance. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Raises:

Type Description
ValidationError

If the model instance could not be validated and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the model instance is already "validated" and revalidation was not required. If validation was required, returns True if validation was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_revalidate(
    self,
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None:
    """Revalidate the model instance.

    It revalidates the model instance in place, enforcing the types
    strictly if specified. If the model instance has already been
    validated, it will not be revalidated unless the `force` argument is
    set to ``True``.

    Args:
        force: Whether to force the revalidation of the model instance.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Raises:
        ValidationError: If the model instance could not be validated and
            `raise_errors` is set to ``True``.

    Returns:
        Returns ``None`` if the model instance is already "validated" and
        revalidation was not required. If validation was required, returns
        ``True`` if validation was successful, otherwise ``False`` if an
        error occurred and `raise_errors` is set to ``False``.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    if not force and self.__pydantic_validated__:
        return None
    else:
        try:
            self.__pydantic_validated__ = False
            self.__pydantic_validator__.validate_python(
                self,
                strict=strict,
                from_attributes=True,
                context=context,
                self_instance=self,
            )
        except Exception as error:
            if raise_errors:
                raise error
            return False
        return True
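
A sketch with the hypothetical User model; with force=True the call never returns None, so the result reflects whether validation succeeded:

user = User.model_construct(name='Alice')
result = user.model_revalidate(force=True, raise_errors=False)
# result is True on success, False if a validation error occurred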

model_update

model_update(
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None

Update the model with the given object and update dictionary.

Parameters:

Name Type Description Default
obj Any

The object to update the model with. It can be a dictionary or an object with attributes (if from_attributes is set to True). If it is a dictionary, the keys must match the model field names if extra fields are not allowed.

required
update dict[str, Any] | None

Values to add/modify within the model. Note that if assignment validation is not set to True, the integrity of the data is not validated when updating the model. Data should be trusted or pre-validated in this case. Defaults to None.

None
from_attributes bool | None

Whether to extract data from object attributes. Defaults to None.

None

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_update(
    self,
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None:
    """Update the model with the given object and update dictionary.

    Args:
        obj: The object to update the model with. It can be a dictionary
            or an object with attributes (if `from_attributes` is set to
            ``True``). If it is a dictionary, the keys must match the model
            field names if extra fields are not allowed.
        update: Values to add/modify within the model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when updating the model. Data should
            be trusted or pre-validated in this case. Defaults to ``None``.
        from_attributes: Whether to extract data from object attributes.
            Defaults to ``None``.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    # Collect update
    update = (update or {}).copy()
    if from_attributes:
        for field_name in self.model_fields:
            if hasattr(obj, field_name):
                update.setdefault(field_name, getattr(obj, field_name))
    elif isinstance(obj, dict):
        update = {**obj, **update}

    # Process update
    for key, value in update.items():
        if key in self.model_fields:
            self.__dict__[key] = value
        else:
            if self.model_config.extra == 'allow':
                if self.__pydantic_extra__ is None:
                    self.__pydantic_extra__ = {}
                self.__pydantic_extra__[key] = value
            elif self.model_config.extra == 'ignore':
                self.__dict__[key] = value
            else:
                raise ValueError(
                    f"Extra field {key!r} is not permitted on the "
                    f"model {self.__class__.__qualname__!r}."
                )

    # Update fields set
    self.__pydantic_fields_set__.update(update.keys())
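
With the hypothetical User model, the source object and the update dictionary are merged into the instance; values are written without assignment validation unless validate_assignment is enabled:

user = User(name='Alice')
user.model_update({'name': 'Bob'}, update={'active': False})
assert user.name == 'Bob' and user.active is False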

model_validate_many classmethod

model_validate_many(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given object collection against the model.

Parameters:

Name Type Description Default
obj Any

The object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object collection items attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given object collection against the model.

    Args:
        obj: The object collection to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            collection items attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
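
For example, with the hypothetical User model:

users = User.model_validate_many([{'name': 'Alice'}, {'name': 'Bob'}])
assert [u.name for u in users] == ['Alice', 'Bob']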

model_validate_json_many classmethod

model_validate_json_many(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given JSON data collection against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json_many(
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given JSON data collection against the model.

    Args:
        json_data: The JSON data collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )

model_validate_strings_many classmethod

model_validate_strings_many(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given string object collection against the model.

Parameters:

Name Type Description Default
obj Any

The string object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given string object collection against the model.

    Args:
        obj: The string object collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

ProjectContactInfo

ProjectContactInfo(**data: Any)

Bases: BaseModel

Project contact information.

Initialize a model instance.

It initializes a model instance by parsing and validating input data from the data keyword arguments.

Parameters:

Name Type Description Default
**data Any

The input data to initialize the model instance.

{}

Raises:

Type Description
ValidationError

If the object could not be validated.

Note

The argument self is explicitly positional-only to allow self as a field name and data keyword argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def __init__(self, /, **data: Any) -> None:
    """Initialize a model instance.

    It initializes a model instance by parsing and validating input data
    from the `data` keyword arguments.

    Args:
        **data: The input data to initialize the model instance.

    Raises:
        ValidationError: If the object could not be validated.

    Note:
        The argument ``self`` is explicitly positional-only to allow
        ``self`` as a field name and data keyword argument.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    self.__pydantic_validator__.validate_python(data, self_instance=self)

model_extra property

model_extra: dict[str, Any] | None

Get extra fields set during validation.

Returns:

Type Description
dict[str, Any] | None

A dictionary of extra fields, or None if config.extra is not set to "allow".

model_fields_set property

model_fields_set: set[str]

Returns the set of fields that have been explicitly set on this model instance.

Returns:

Type Description
set[str]

A set of strings representing the fields that have been set, i.e. that were not filled from defaults.

model_construct classmethod

model_construct(
    _fields_set: set[str] | None = None, **data: Any
) -> Model

Creates a new instance of the model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. It behaves as if model_config.extra = 'allow' was set since it adds all passed values.

Parameters:

Name Type Description Default
_fields_set set[str] | None

The set of field names accepted by the model instance.

None
**data Any

Trusted or pre-validated input data to initialize the model. It is used to set the __dict__ attribute of the model.

{}

Returns:

Type Description
Model

A new instance of the model class with validated data.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_construct(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    _fields_set: set[str] | None = None,
    **data: Any,
) -> Model:
    """Creates a new instance of the model class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__`
    from trusted or pre-validated data. Default values are respected, but
    no other validation is performed. It behaves as if
    `model_config.extra = 'allow'` was set since it adds all passed values.

    Args:
        _fields_set: The set of field names accepted by the model instance.
        **data: Trusted or pre-validated input data to initialize the
            model. It is used to set the `__dict__` attribute of the model.

    Returns:
        A new instance of the model class with validated data.
    """
    model = super().model_construct(_fields_set, **data)

    # Remove default initialization of instrumented resource fields, as
    # they are not needed when constructing a resource instance directly,
    # i.e. defaults are already set and stored in the database.
    if cls.__pydantic_owner__ == 'resource':
        resource = cls.__pydantic_resource__
        for name in getattr(resource, 'resource_attributes'):
            if _fields_set and name in _fields_set:
                continue
            model.__dict__.pop(name, None)

    return model

model_copy

model_copy(
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Parameters:

Name Type Description Default
update dict[str, Any] | None

Values to add/modify within the new model. Note that if assignment validation is not set to True, the integrity of the data is not validated when creating the new model. Data should be trusted or pre-validated in this case.

None
deep bool

Set to True to make a deep copy of the model.

False

Returns:

Type Description
Model

A new copy of the model instance with the updated values.

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_copy(  # type: ignore[override, unused-ignore]
    self: Model,
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model:
    """Returns a copy of the model.

    Args:
        update: Values to add/modify within the new model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when creating the new model. Data
            should be trusted or pre-validated in this case.
        deep: Set to ``True`` to make a deep copy of the model.

    Returns:
        A new copy of the model instance with the updated values.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    copied = self.__deepcopy__() if deep else self.__copy__()
    if update:
        copied.model_update(update, from_attributes=False)
    return copied

model_dump

model_dump(
    *,
    mode: Literal["json", "python", "raw"] | str = "python",
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]

Generate a dictionary representation of the model.

It is used to dump the model instance to a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default
mode Literal['json', 'python', 'raw'] | str

The mode in which to_python should run:
- If mode is json, the output will only contain JSON serializable types.
- If mode is python, the output may contain non JSON serializable Python objects.
- If mode is raw, the output will contain raw values.
Defaults to python.

'python'
include IncEx | None

A list of fields to include in the output. Defaults to None.

None
exclude IncEx | None

A list of fields to exclude from the output. Defaults to None.

None
by_alias bool

Whether to use the field's alias in the dictionary key if defined. Defaults to False.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set. Defaults to False.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value. Defaults to False.

False
exclude_none bool

Whether to exclude fields that have a value of None. Defaults to False.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T]. Defaults to False.

False
warnings bool

Whether to log warnings when invalid fields are encountered. Defaults to True.

True

Returns:

Type Description
dict[str, Any]

A dictionary representation of the model.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_dump(  # type: ignore[override, unused-ignore]
    self,
    *,
    mode: Literal['json', 'python', 'raw'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]:
    """Generate a dictionary representation of the model.

    It is used to dump the model instance to a dictionary representation of
    the model, optionally specifying which fields to include or exclude.

    Args:
        mode: The mode in which `to_python` should run:
            - If mode is ``json``, the output will only contain JSON
                serializable types.
            - If mode is ``python``, the output may contain non JSON
                serializable Python objects.
            - If mode is ``raw``, the output will contain raw values.
            Defaults to ``python``.
        include: A list of fields to include in the output.
            Defaults to ``None``.
        exclude: A list of fields to exclude from the output.
            Defaults to ``None``.
        by_alias: Whether to use the field's alias in the dictionary key if
            defined. Defaults to ``False``.
        exclude_unset: Whether to exclude fields that have not been
            explicitly set. Defaults to ``False``.
        exclude_defaults: Whether to exclude fields that are set to their
            default value. Defaults to ``False``.
        exclude_none: Whether to exclude fields that have a value of
            ``None``. Defaults to ``False``.
        round_trip: If ``True``, dumped values should be valid as input for
            non-idempotent types such as `Json[T]`. Defaults to ``False``.
        warnings: Whether to log warnings when invalid fields are
            encountered. Defaults to ``True``.

    Returns:
        A dictionary representation of the model.
    """
    if mode != 'raw':
        return self.__pydantic_serializer__.to_python(  # type: ignore
            self,
            mode=mode,
            by_alias=by_alias,
            include=include,  # type: ignore
            exclude=exclude,  # type: ignore
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
        )

    # Handle raw mode
    result: dict[str, Any] = {}
    for field_name, field_info in self.model_fields.items():
        if not hasattr(self, field_name):
            continue
        value = getattr(self, field_name)
        # Skip excluded fields
        if include is not None and field_name not in include:
            continue
        if exclude is not None and field_name in exclude:
            continue
        if exclude_unset and field_name not in self.model_fields_set:
            continue
        if exclude_defaults and value == field_info.default:
            continue
        if exclude_none and value is None:
            continue
        # Add field value
        if by_alias and field_info.alias:
            result[field_info.alias] = value
        else:
            result[field_name] = value
    return result

model_dump_json

model_dump_json(
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str

Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default
indent int | None

Indentation to use in the JSON output. If None is passed, the output will be compact.

None
include IncEx

Field(s) to include in the JSON output.

None
exclude IncEx

Field(s) to exclude from the JSON output.

None
by_alias bool

Whether to serialize using field aliases.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value.

False
exclude_none bool

Whether to exclude fields that have a value of None.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T].

False
warnings bool

Whether to log warnings when invalid fields are encountered.

True

Returns:

Type Description
str

A JSON string representation of the model.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str:
    """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

    Generates a JSON representation of the model using Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: Whether to log warnings when invalid fields are encountered.

    Returns:
        A JSON string representation of the model.
    """
    return self.__pydantic_serializer__.to_json(
        self,
        indent=indent,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
    ).decode()
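
For instance, reusing the hypothetical Example model from the model_dump sketch above:

item = Example(name='widget')
item.model_dump_json()                   # '{"name":"widget","count":0}'
item.model_dump_json(indent=2)           # pretty-printed with two-space indentation
item.model_dump_json(exclude={'count'})  # '{"name":"widget"}'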

model_json_schema classmethod

model_json_schema(
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[
        GenerateJsonSchema
    ] = GenerateJsonSchema,
    mode: JsonSchemaMode = "validation",
    source: JsonSchemaSource = "model",
) -> dict[str, Any]

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default
by_alias bool

Whether to use field aliases when generating the schema, i.e. if True, fields will be serialized according to their alias, otherwise according to their attribute name. Defaults to True.

True
ref_template str

The template format string used when generating reference names. Defaults to DEFAULT_REF_TEMPLATE.

DEFAULT_REF_TEMPLATE
schema_generator type[GenerateJsonSchema]

The class to use for generating the JSON Schema.

GenerateJsonSchema
mode JsonSchemaMode

The mode to use for generating the JSON Schema. It can be either validation or serialization where respectively the schema is generated for validating data or serializing data. Defaults to validation.

'validation'
source JsonSchemaSource

The source type to use for generating the resources JSON schema. It can be either key, model, or both, where the latter accepts, when applicable, integer and string values for key identifiers in addition to the standard model schema generation. Defaults to model.

'model'

Returns:

Type Description
dict[str, Any]

The generated JSON schema of the model class.

Note

The schema generator class can be overridden to customize the logic used to generate the JSON schema. This can be done by subclassing the GenerateJsonSchema class and passing the subclass as the schema_generator argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_json_schema(  # type: ignore[override, unused-ignore]
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
    source: JsonSchemaSource = 'model',
) -> dict[str, Any]:
    """Generates a JSON schema for a model class.

    Args:
        by_alias: Whether to use field aliases when generating the schema,
            i.e. if ``True``, fields will be serialized according to their
            alias, otherwise according to their attribute name.
            Defaults to ``True``.
        ref_template: The template format string used when generating
            reference names. Defaults to ``DEFAULT_REF_TEMPLATE``.
        schema_generator: The class to use for generating the JSON Schema.
        mode: The mode to use for generating the JSON Schema. It can be
            either ``validation`` or ``serialization`` where respectively
            the schema is generated for validating data or serializing
            data. Defaults to ``validation``.
        source: The source type to use for generating the resources JSON
            schema. It can be either ``key`` , ``model``, or ``both`` where
            the latter accepts, when applicable, integer and string values
            for key identifiers in addition to the standard model schema
            generation. Defaults to ``model``.

    Returns:
        The generated JSON schema of the model class.

    Note:
        The schema generator class can be overridden to customize the
        logic used to generate the JSON schema. This can be done by
        subclassing the `GenerateJsonSchema` class and passing the subclass
        as the `schema_generator` argument.
    """
    schema_generator_instance = schema_generator(
        by_alias=by_alias, ref_template=ref_template
    )
    if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer):
        cls.__pydantic_validator__.rebuild()
    return schema_generator_instance.generate(
        cls.__pydantic_core_schema__, mode=mode, source=source
    )
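
As a rough illustration with the hypothetical Example model (the resource-specific sources key and both only matter for resource classes):

# Schema used to validate incoming data (the default mode and source)
Example.model_json_schema()
# Schema describing the serialized output instead
Example.model_json_schema(mode='serialization')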

model_parametrized_name classmethod

model_parametrized_name(
    params: tuple[type[Any], ...],
) -> str

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default
params tuple[type[Any], ...]

Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.

required

Returns:

Type Description
str

String representing the new class where params are passed to cls as type variables.

Raises:

Type Description
TypeError

Raised when trying to generate concrete names for non-generic models.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@classmethod
def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
    """Compute the class name for parametrizations of generic classes.

    This method can be overridden to achieve a custom naming scheme for generic BaseModels.

    Args:
        params: Tuple of types of the class. Given a generic class
            `Model` with 2 type variables and a concrete model `Model[str, int]`,
            the value `(str, int)` would be passed to `params`.

    Returns:
        String representing the new class where `params` are passed to `cls` as type variables.

    Raises:
        TypeError: Raised when trying to generate concrete names for non-generic models.
    """
    if not issubclass(cls, typing.Generic):
        raise TypeError('Concrete names should only be generated for generic models.')

    # Any strings received should represent forward references, so we handle them specially below.
    # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
    # we may be able to remove this special case.
    param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
    params_component = ', '.join(param_names)
    return f'{cls.__name__}[{params_component}]'
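
A short sketch, assuming the documented base model supports Pydantic-style generics:

from typing import Generic, TypeVar

T = TypeVar('T')

class Wrapper(BaseModel, Generic[T]):
    value: T

Wrapper.model_parametrized_name((int,))  # 'Wrapper[int]'
Wrapper[str].__name__                    # 'Wrapper[str]', produced through this hook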

model_post_init

model_post_init(__context: Any) -> None

Post-initialization method for the model class.

Override this method to perform additional initialization after the __init__ and model_construct methods have been called. This is useful in scenarios where it is necessary to perform additional initialization steps after the model has been fully initialized.

Parameters:

Name Type Description Default
__context Any

The context object passed to the model instance.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_post_init(self, __context: Any) -> None:
    """Post-initialization method for the model class.

    Override this method to perform additional initialization after the
    `__init__` and `model_construct` methods have been called. This is
    useful in scenarios where it is necessary to perform additional
    initialization steps after the model has been fully initialized.

    Args:
        __context: The context object passed to the model instance.
    """
    ...
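
A minimal override sketch; the hook receives whatever context was passed to validation and runs once the fields are populated:

from typing import Any

class Audited(BaseModel):
    name: str

    def model_post_init(self, __context: Any) -> None:
        # Called after __init__ / model_construct have set the fields
        print(f"initialized {self.name!r} with context {__context!r}")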

model_rebuild classmethod

model_rebuild(
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default
force bool

Whether to force the rebuilding of the model schema. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
_parent_namespace_depth int

The depth level of the parent namespace. Defaults to 2.

2
_types_namespace dict[str, Any] | None

The types namespace. Defaults to None.

None

Raises:

Type Description
PlateformeError

If an error occurred while rebuilding the model adapter and raise_errors is set to True.

PydanticUndefinedAnnotation

If PydanticUndefinedAnnotation occurs in __get_pydantic_core_schema__ and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_rebuild(  # type: ignore[override, unused-ignore]
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a `ForwardRef`
    which could not be resolved during the initial attempt to build the
    schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        _parent_namespace_depth: The depth level of the parent namespace.
            Defaults to 2.
        _types_namespace: The types namespace. Defaults to ``None``.

    Raises:
        PlateformeError: If an error occurred while rebuilding the model
            adapter and `raise_errors` is set to ``True``.
        PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation`
            occurs in`__get_pydantic_core_schema__` and `raise_errors` is
            set to ``True``.

    Returns:
        Returns ``None`` if the schema is already "complete" and rebuilding
        was not required. If rebuilding was required, returns ``True`` if
        rebuilding was successful, otherwise ``False`` if an error
        occurred and `raise_errors` is set to ``False``.
    """
    build_status: bool | None = None

    # Rebuild model
    build_status = super().model_rebuild(
        force=build_status or force,
        raise_errors=raise_errors,
        _parent_namespace_depth=_parent_namespace_depth,
        _types_namespace=_types_namespace,
    )

    # Rebuild model adapter
    if build_status:
        try:
            adapter = TypeAdapterList(cls)
            setattr(cls, '__pydantic_adapter__', adapter)
        except Exception as error:
            if not raise_errors:
                return False
            raise PlateformeError(
                f"Failed to rebuild model adapter for {cls.__name__!r}.",
                code='model-build-failed',
            )

    if build_status is not False:
        cls.model_config.pop('defer_build')

    return build_status
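
A typical use is resolving a forward reference that was undefined when the first class was created, sketched here with hypothetical models:

class Node(BaseModel):
    value: int
    parent: 'Tree | None' = None  # 'Tree' does not exist yet

class Tree(BaseModel):
    root: Node

# Now that Tree is defined, rebuilding resolves the forward reference;
# returns True when a rebuild was needed and succeeded, None otherwise.
Node.model_rebuild()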

model_validate classmethod

model_validate(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given object against the model.

Parameters:

Name Type Description Default
obj Any

The object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given object against the model.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
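
For example, with the hypothetical Example model:

Example.model_validate({'name': 'widget', 'count': '3'})  # lax mode coerces '3' to 3

class Row:
    # any object with matching attributes works with from_attributes
    name = 'widget'
    count = 2

Example.model_validate(Row(), from_attributes=True)
Example.model_validate({'name': 'widget', 'count': '3'}, strict=True)  # raises ValidationError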

model_validate_json classmethod

model_validate_json(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given JSON data against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given JSON data against the model.

    Args:
        json_data: The JSON data to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )
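
For example, with the hypothetical Example model:

Example.model_validate_json('{"name": "widget", "count": 3}')
Example.model_validate_json(b'{"name": "widget"}', strict=True)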

model_validate_strings classmethod

model_validate_strings(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given string object against the model.

Parameters:

Name Type Description Default
obj Any

The string object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given string object against the model.

    Args:
        obj: The string object to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

copy

copy(
    *,
    include: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    exclude: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    update: Dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

Name Type Description Default
include AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to include in the copied model.

None
exclude AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to exclude in the copied model.

None
update Dict[str, Any] | None

Optional dictionary of field-value pairs to override field values in the copied model.

None
deep bool

If True, the values of fields that are Pydantic models will be deep-copied.

False

Returns:

Type Description
Model

A copy of the model with included, excluded and updated fields as specified.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@typing_extensions.deprecated(
    'The `copy` method is deprecated; use `model_copy` instead. '
    'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
    category=None,
)
def copy(
    self: Model,
    *,
    include: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
    update: typing.Dict[str, Any] | None = None,  # noqa UP006
    deep: bool = False,
) -> Model:  # pragma: no cover
    """Returns a copy of the model.

    !!! warning "Deprecated"
        This method is now deprecated; use `model_copy` instead.

    If you need `include` or `exclude`, use:

    ```py
    data = self.model_dump(include=include, exclude=exclude, round_trip=True)
    data = {**data, **(update or {})}
    copied = self.model_validate(data)
    ```

    Args:
        include: Optional set or mapping specifying which fields to include in the copied model.
        exclude: Optional set or mapping specifying which fields to exclude in the copied model.
        update: Optional dictionary of field-value pairs to override field values in the copied model.
        deep: If True, the values of fields that are Pydantic models will be deep-copied.

    Returns:
        A copy of the model with included, excluded and updated fields as specified.
    """
    warnings.warn(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=PydanticDeprecatedSince20,
    )
    from .deprecated import copy_internals

    values = dict(
        copy_internals._iter(
            self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
        ),
        **(update or {}),
    )
    if self.__pydantic_private__ is None:
        private = None
    else:
        private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

    if self.__pydantic_extra__ is None:
        extra: dict[str, Any] | None = None
    else:
        extra = self.__pydantic_extra__.copy()
        for k in list(self.__pydantic_extra__):
            if k not in values:  # k was in the exclude
                extra.pop(k)
        for k in list(values):
            if k in self.__pydantic_extra__:  # k must have come from extra
                extra[k] = values.pop(k)

    # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
    if update:
        fields_set = self.__pydantic_fields_set__ | update.keys()
    else:
        fields_set = set(self.__pydantic_fields_set__)

    # removing excluded fields from `__pydantic_fields_set__`
    if exclude:
        fields_set -= set(exclude)

    return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

model_adapter

model_adapter() -> TypeAdapterList[BaseModel]

Get the model type adapter.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classproperty
def model_adapter(cls) -> TypeAdapterList['BaseModel']:
    """Get the model type adapter."""
    if not hasattr(cls, '__pydantic_adapter__'):
        raise AttributeError(
            "The model type adapter is not defined. This may be due to "
            "the model not being fully built or an error occurred during "
            "model construction."
        )
    return cls.__pydantic_adapter__
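
A brief sketch with the hypothetical Example model; the returned adapter is what backs the model_validate_*_many helpers below:

adapter = Example.model_adapter
adapter.validate_python([{'name': 'a'}, {'name': 'b'}])  # -> sequence of Example instances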

model_revalidate

model_revalidate(
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None

Revalidate the model instance.

It revalidates the model instance in place, enforcing the types strictly if specified. If the model instance has already been validated, it will not be revalidated unless the force argument is set to True.

Parameters:

Name Type Description Default
force bool

Whether to force the revalidation of the model instance. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Raises:

Type Description
ValidationError

If the model instance could not be validated and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the model instance is already "validated" and revalidation was not required. If validation was required, returns True if validation was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_revalidate(
    self,
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None:
    """Revalidate the model instance.

    It revalidates the model instance in place, enforcing the types
    strictly if specified. If the model instance has already been
    validated, it will not be revalidated unless the `force` argument is
    set to ``True``.

    Args:
        force: Whether to force the revalidation of the model instance.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Raises:
        ValidationError: If the model instance could not be validated and
            `raise_errors` is set to ``True``.

    Returns:
        Returns ``None`` if the model instance is already "validated" and
        revalidation was not required. If validation was required, returns
        ``True`` if validation was successful, otherwise ``False`` if an
        error occurred and `raise_errors` is set to ``False``.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    if not force and self.__pydantic_validated__:
        return None
    else:
        try:
            self.__pydantic_validated__ = False
            self.__pydantic_validator__.validate_python(
                self,
                strict=strict,
                from_attributes=True,
                context=context,
                self_instance=self,
            )
        except Exception as error:
            if raise_errors:
                raise error
            return False
        return True
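
A short sketch with the hypothetical Example model, assuming a regular __init__ leaves the instance marked as validated:

item = Example(name='widget')
item.model_revalidate()                         # nothing to do -> None
item.model_revalidate(force=True)               # re-runs validation in place -> True
item.model_revalidate(force=True, strict=True)  # same, but enforcing strict types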

model_update

model_update(
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None

Update the model with the given object and update dictionary.

Parameters:

Name Type Description Default
obj Any

The object to update the model with. It can be a dictionary or an object with attributes (if from_attributes is set to True). If it is a dictionary, the keys must match the model field names if extra fields are not allowed.

required
update dict[str, Any] | None

Values to add/modify within the model. Note that if assignment validation is not set to True, the integrity of the data is not validated when updating the model. Data should be trusted or pre-validated in this case. Defaults to None.

None
from_attributes bool | None

Whether to extract data from object attributes. Defaults to None.

None

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_update(
    self,
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None:
    """Update the model with the given object and update dictionary.

    Args:
        obj: The object to update the model with. It can be a dictionary
            or an object with attributes (if `from_attributes` is set to
            ``True``). If it is a dictionary, the keys must match the model
            field names if extra fields are not allowed.
        update: Values to add/modify within the model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when updating the model. Data should
            be trusted or pre-validated in this case. Defaults to ``None``.
        from_attributes: Whether to extract data from object attributes.
            Defaults to ``None``.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    # Collect update
    update = (update or {}).copy()
    if from_attributes:
        for field_name in self.model_fields:
            if hasattr(obj, field_name):
                update.setdefault(field_name, getattr(obj, field_name))
    elif isinstance(obj, dict):
        update = {**obj, **update}

    # Process update
    for key, value in update.items():
        if key in self.model_fields:
            self.__dict__[key] = value
        else:
            if self.model_config.extra == 'allow':
                if self.__pydantic_extra__ is None:
                    self.__pydantic_extra__ = {}
                self.__pydantic_extra__[key] = value
            elif self.model_config.extra == 'ignore':
                self.__dict__[key] = value
            else:
                raise ValueError(
                    f"Extra field {key!r} is not permitted on the "
                    f"model {self.__class__.__qualname__!r}."
                )

    # Update fields set
    self.__pydantic_fields_set__.update(update.keys())
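
For example, with the hypothetical Example model (no validation happens here unless assignment validation is enabled):

item = Example(name='widget')
item.model_update({'count': 5})                         # merge values from a mapping
item.model_update({'name': 'a'}, update={'name': 'b'})  # the update mapping wins -> name == 'b'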

model_validate_many classmethod

model_validate_many(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given object collection against the model.

Parameters:

Name Type Description Default
obj Any

The object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object collection items attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given object collection against the model.

    Args:
        obj: The object collection to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            collection items attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
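
For example, with the hypothetical Example model:

rows = [{'name': 'a'}, {'name': 'b', 'count': 2}]
Example.model_validate_many(rows)  # -> sequence of validated Example instances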

model_validate_json_many classmethod

model_validate_json_many(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given JSON data collection against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json_many(
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given JSON data collection against the model.

    Args:
        json_data: The JSON data collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )

model_validate_strings_many classmethod

model_validate_strings_many(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given string object collection against the model.

Parameters:

Name Type Description Default
obj Any

The string object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given string object collection against the model.

    Args:
        obj: The string object collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

ProjectLicenseInfo

ProjectLicenseInfo(**data: Any)

Bases: BaseModel

Project license information.

Initialize a model instance.

It initializes a model instance by parsing and validating input data from the data keyword arguments.

Parameters:

Name Type Description Default
**data Any

The input data to initialize the model instance.

{}

Raises:

Type Description
ValidationError

If the object could not be validated.

Note

The argument self is explicitly positional-only to allow self as a field name and data keyword argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def __init__(self, /, **data: Any) -> None:
    """Initialize a model instance.

    It initializes a model instance by parsing and validating input data
    from the `data` keyword arguments.

    Args:
        **data: The input data to initialize the model instance.

    Raises:
        ValidationError: If the object could not be validated.

    Note:
        The argument ``self`` is explicitly positional-only to allow
        ``self`` as a field name and data keyword argument.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    self.__pydantic_validator__.validate_python(data, self_instance=self)

model_extra property

model_extra: dict[str, Any] | None

Get extra fields set during validation.

Returns:

Type Description
dict[str, Any] | None

A dictionary of extra fields, or None if config.extra is not set to "allow".

model_fields_set property

model_fields_set: set[str]

Returns the set of fields that have been explicitly set on this model instance.

Returns:

Type Description
set[str]

A set of strings representing the fields that have been set, i.e. that were not filled from defaults.

model_construct classmethod

model_construct(
    _fields_set: set[str] | None = None, **data: Any
) -> Model

Creates a new instance of the model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. It behaves as if model_config.extra = 'allow' was set since it adds all passed values.

Parameters:

Name Type Description Default
_fields_set set[str] | None

The set of field names accepted by the model instance.

None
**data Any

Trusted or pre-validated input data to initialize the model. It is used to set the __dict__ attribute of the model.

{}

Returns:

Type Description
Model

A new instance of the model class with validated data.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_construct(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    _fields_set: set[str] | None = None,
    **data: Any,
) -> Model:
    """Creates a new instance of the model class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__`
    from trusted or pre-validated data. Default values are respected, but
    no other validation is performed. It behaves as if
    `model_config.extra = 'allow'` was set since it adds all passed values.

    Args:
        _fields_set: The set of field names accepted by the model instance.
        **data: Trusted or pre-validated input data to initialize the
            model. It is used to set the `__dict__` attribute of the model.

    Returns:
        A new instance of the model class with validated data.
    """
    model = super().model_construct(_fields_set, **data)

    # Remove default initialization of instrumented resource fields, as
    # they are not needed when constructing a resource instance directly,
    # i.e. defaults are already set and stored in the database.
    if cls.__pydantic_owner__ == 'resource':
        resource = cls.__pydantic_resource__
        for name in getattr(resource, 'resource_attributes'):
            if _fields_set and name in _fields_set:
                continue
            model.__dict__.pop(name, None)

    return model
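
A brief sketch, reusing the hypothetical Example model (the same behaviour applies to any model built on this base class):

# Values are trusted: no coercion or constraint checks are performed
item = Example.model_construct(name='widget', count='not-an-int')
item.count             # 'not-an-int' is stored as-is
item.model_fields_set  # {'name', 'count'}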

model_copy

model_copy(
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Parameters:

Name Type Description Default
update dict[str, Any] | None

Values to add/modify within the new model. Note that if assignment validation is not set to True, the integrity of the data is not validated when creating the new model. Data should be trusted or pre-validated in this case.

None
deep bool

Set to True to make a deep copy of the model.

False

Returns:

Type Description
Model

A new copy of the model instance with the updated values.

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_copy(  # type: ignore[override, unused-ignore]
    self: Model,
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model:
    """Returns a copy of the model.

    Args:
        update: Values to add/modify within the new model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when creating the new model. Data
            should be trusted or pre-validated in this case.
        deep: Set to ``True`` to make a deep copy of the model.

    Returns:
        A new copy of the model instance with the updated values.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    copied = self.__deepcopy__() if deep else self.__copy__()
    if update:
        copied.model_update(update, from_attributes=False)
    return copied
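
For example, with the hypothetical Example model:

item = Example(name='widget')
clone = item.model_copy(update={'count': 9})  # shallow copy; the update values are not validated
deep_clone = item.model_copy(deep=True)       # nested models are copied recursively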

model_dump

model_dump(
    *,
    mode: Literal["json", "python", "raw"] | str = "python",
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]

Generate a dictionary representation of the model.

It is used to dump the model instance to a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default
mode Literal['json', 'python', 'raw'] | str

The mode in which to_python should run:

- If mode is json, the output will only contain JSON serializable types.
- If mode is python, the output may contain non JSON serializable Python objects.
- If mode is raw, the output will contain raw values.

Defaults to python.

'python'
include IncEx | None

A list of fields to include in the output. Defaults to None.

None
exclude IncEx | None

A list of fields to exclude from the output. Defaults to None.

None
by_alias bool

Whether to use the field's alias in the dictionary key if defined. Defaults to False.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set. Defaults to False.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value. Defaults to False.

False
exclude_none bool

Whether to exclude fields that have a value of None. Defaults to False.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T]. Defaults to False.

False
warnings bool

Whether to log warnings when invalid fields are encountered. Defaults to True.

True

Returns:

Type Description
dict[str, Any]

A dictionary representation of the model.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_dump(  # type: ignore[override, unused-ignore]
    self,
    *,
    mode: Literal['json', 'python', 'raw'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]:
    """Generate a dictionary representation of the model.

    It is used to dump the model instance to a dictionary representation of
    the model, optionally specifying which fields to include or exclude.

    Args:
        mode: The mode in which `to_python` should run:
            - If mode is ``json``, the output will only contain JSON
                serializable types.
            - If mode is ``python``, the output may contain non JSON
                serializable Python objects.
            - If mode is ``raw``, the output will contain raw values.
            Defaults to ``python``.
        include: A list of fields to include in the output.
            Defaults to ``None``.
        exclude: A list of fields to exclude from the output.
            Defaults to ``None``.
        by_alias: Whether to use the field's alias in the dictionary key if
            defined. Defaults to ``False``.
        exclude_unset: Whether to exclude fields that have not been
            explicitly set. Defaults to ``False``.
        exclude_defaults: Whether to exclude fields that are set to their
            default value. Defaults to ``False``.
        exclude_none: Whether to exclude fields that have a value of
            ``None``. Defaults to ``False``.
        round_trip: If ``True``, dumped values should be valid as input for
            non-idempotent types such as `Json[T]`. Defaults to ``False``.
        warnings: Whether to log warnings when invalid fields are
            encountered. Defaults to ``True``.

    Returns:
        A dictionary representation of the model.
    """
    if mode != 'raw':
        return self.__pydantic_serializer__.to_python(  # type: ignore
            self,
            mode=mode,
            by_alias=by_alias,
            include=include,  # type: ignore
            exclude=exclude,  # type: ignore
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
        )

    # Handle raw mode
    result: dict[str, Any] = {}
    for field_name, field_info in self.model_fields.items():
        if not hasattr(self, field_name):
            continue
        value = getattr(self, field_name)
        # Skip excluded fields
        if include is not None and field_name not in include:
            continue
        if exclude is not None and field_name in exclude:
            continue
        if exclude_unset and field_name not in self.model_fields_set:
            continue
        if exclude_defaults and value == field_info.default:
            continue
        if exclude_none and value is None:
            continue
        # Add field value
        if by_alias and field_info.alias:
            result[field_info.alias] = value
        else:
            result[field_name] = value
    return result

model_dump_json

model_dump_json(
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str

Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default
indent int | None

Indentation to use in the JSON output. If None is passed, the output will be compact.

None
include IncEx

Field(s) to include in the JSON output.

None
exclude IncEx

Field(s) to exclude from the JSON output.

None
by_alias bool

Whether to serialize using field aliases.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value.

False
exclude_none bool

Whether to exclude fields that have a value of None.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T].

False
warnings bool

Whether to log warnings when invalid fields are encountered.

True

Returns:

Type Description
str

A JSON string representation of the model.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str:
    """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

    Generates a JSON representation of the model using Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: Whether to log warnings when invalid fields are encountered.

    Returns:
        A JSON string representation of the model.
    """
    return self.__pydantic_serializer__.to_json(
        self,
        indent=indent,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
    ).decode()

model_json_schema classmethod

model_json_schema(
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[
        GenerateJsonSchema
    ] = GenerateJsonSchema,
    mode: JsonSchemaMode = "validation",
    source: JsonSchemaSource = "model",
) -> dict[str, Any]

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default
by_alias bool

Whether to use field aliases when generating the schema, i.e. if True, fields will be serialized according to their alias, otherwise according to their attribute name. Defaults to True.

True
ref_template str

The template format string used when generating reference names. Defaults to DEFAULT_REF_TEMPLATE.

DEFAULT_REF_TEMPLATE
schema_generator type[GenerateJsonSchema]

The class to use for generating the JSON Schema.

GenerateJsonSchema
mode JsonSchemaMode

The mode to use for generating the JSON Schema. It can be either validation or serialization where respectively the schema is generated for validating data or serializing data. Defaults to validation.

'validation'
source JsonSchemaSource

The source type to use for generating the resources JSON schema. It can be either key, model, or both, where the latter accepts, when applicable, integer and string values for key identifiers in addition to the standard model schema generation. Defaults to model.

'model'

Returns:

Type Description
dict[str, Any]

The generated JSON schema of the model class.

Note

The schema generator class can be overridden to customize the logic used to generate the JSON schema. This can be done by subclassing the GenerateJsonSchema class and passing the subclass as the schema_generator argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_json_schema(  # type: ignore[override, unused-ignore]
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
    source: JsonSchemaSource = 'model',
) -> dict[str, Any]:
    """Generates a JSON schema for a model class.

    Args:
        by_alias: Whether to use field aliases when generating the schema,
            i.e. if ``True``, fields will be serialized according to their
            alias, otherwise according to their attribute name.
            Defaults to ``True``.
        ref_template: The template format string used when generating
            reference names. Defaults to ``DEFAULT_REF_TEMPLATE``.
        schema_generator: The class to use for generating the JSON Schema.
        mode: The mode to use for generating the JSON Schema. It can be
            either ``validation`` or ``serialization`` where respectively
            the schema is generated for validating data or serializing
            data. Defaults to ``validation``.
        source: The source type to use for generating the resources JSON
            schema. It can be either ``key`` , ``model``, or ``both`` where
            the latter accepts, when applicable, integer and string values
            for key identifiers in addition to the standard model schema
            generation. Defaults to ``model``.

    Returns:
        The generated JSON schema of the model class.

    Note:
        The schema generator class can be overridden to customize the
        logic used to generate the JSON schema. This can be done by
        subclassing the `GenerateJsonSchema` class and passing the subclass
        as the `schema_generator` argument.
    """
    schema_generator_instance = schema_generator(
        by_alias=by_alias, ref_template=ref_template
    )
    if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer):
        cls.__pydantic_validator__.rebuild()
    return schema_generator_instance.generate(
        cls.__pydantic_core_schema__, mode=mode, source=source
    )

model_parametrized_name classmethod

model_parametrized_name(
    params: tuple[type[Any], ...],
) -> str

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default
params tuple[type[Any], ...]

Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.

required

Returns:

Type Description
str

String representing the new class where params are passed to cls as type variables.

Raises:

Type Description
TypeError

Raised when trying to generate concrete names for non-generic models.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@classmethod
def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
    """Compute the class name for parametrizations of generic classes.

    This method can be overridden to achieve a custom naming scheme for generic BaseModels.

    Args:
        params: Tuple of types of the class. Given a generic class
            `Model` with 2 type variables and a concrete model `Model[str, int]`,
            the value `(str, int)` would be passed to `params`.

    Returns:
        String representing the new class where `params` are passed to `cls` as type variables.

    Raises:
        TypeError: Raised when trying to generate concrete names for non-generic models.
    """
    if not issubclass(cls, typing.Generic):
        raise TypeError('Concrete names should only be generated for generic models.')

    # Any strings received should represent forward references, so we handle them specially below.
    # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
    # we may be able to remove this special case.
    param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
    params_component = ', '.join(param_names)
    return f'{cls.__name__}[{params_component}]'

model_post_init

model_post_init(__context: Any) -> None

Post-initialization method for the model class.

Override this method to perform additional initialization after the __init__ and model_construct methods have been called. This is useful in scenarios where it is necessary to perform additional initialization steps after the model has been fully initialized.

Parameters:

Name Type Description Default
__context Any

The context object passed to the model instance.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_post_init(self, __context: Any) -> None:
    """Post-initialization method for the model class.

    Override this method to perform additional initialization after the
    `__init__` and `model_construct` methods have been called. This is
    useful in scenarios where it is necessary to perform additional
    initialization steps after the model has been fully initialized.

    Args:
        __context: The context object passed to the model instance.
    """
    ...

model_rebuild classmethod

model_rebuild(
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default
force bool

Whether to force the rebuilding of the model schema. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
_parent_namespace_depth int

The depth level of the parent namespace. Defaults to 2.

2
_types_namespace dict[str, Any] | None

The types namespace. Defaults to None.

None

Raises:

Type Description
PlateformeError

If an error occurred while rebuilding the model adapter and raise_errors is set to True.

PydanticUndefinedAnnotation

If PydanticUndefinedAnnotation occurs in __get_pydantic_core_schema__ and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_rebuild(  # type: ignore[override, unused-ignore]
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a `ForwardRef`
    which could not be resolved during the initial attempt to build the
    schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        _parent_namespace_depth: The depth level of the parent namespace.
            Defaults to 2.
        _types_namespace: The types namespace. Defaults to ``None``.

    Raises:
        PlateformeError: If an error occurred while rebuilding the model
            adapter and `raise_errors` is set to ``True``.
        PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation`
            occurs in`__get_pydantic_core_schema__` and `raise_errors` is
            set to ``True``.

    Returns:
        Returns ``None`` if the schema is already "complete" and rebuilding
        was not required. If rebuilding was required, returns ``True`` if
        rebuilding was successful, otherwise ``False`` if an error
        occurred and `raise_errors` is set to ``False``.
    """
    build_status: bool | None = None

    # Rebuild model
    build_status = super().model_rebuild(
        force=build_status or force,
        raise_errors=raise_errors,
        _parent_namespace_depth=_parent_namespace_depth,
        _types_namespace=_types_namespace,
    )

    # Rebuild model adapter
    if build_status:
        try:
            adapter = TypeAdapterList(cls)
            setattr(cls, '__pydantic_adapter__', adapter)
        except Exception as error:
            if not raise_errors:
                return False
            raise PlateformeError(
                f"Failed to rebuild model adapter for {cls.__name__!r}.",
                code='model-build-failed',
            )

    if build_status is not False:
        cls.model_config.pop('defer_build')

    return build_status
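
For instance, a model whose annotation is a forward reference defined later can be rebuilt explicitly once the referenced type exists. A minimal sketch, under the same assumed import path:

from plateforme.core.schema.models import BaseModel  # assumed import path

class Account(BaseModel):
    # 'Profile' cannot be resolved when Account is first defined
    profile: 'Profile | None' = None

class Profile(BaseModel):
    nickname: str

# Returns True if rebuilding succeeded, None if nothing was required
status = Account.model_rebuild()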

model_validate classmethod

model_validate(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given object against the model.

Parameters:

Name Type Description Default
obj Any

The object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given object against the model.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
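
An illustrative sketch, validating from a dictionary and, with from_attributes, from an arbitrary object (hypothetical Point model, assumed import path):

from types import SimpleNamespace

from plateforme.core.schema.models import BaseModel  # assumed import path

class Point(BaseModel):
    x: int
    y: int

p1 = Point.model_validate({'x': 1, 'y': 2})
# Read values from object attributes instead of mapping keys
p2 = Point.model_validate(SimpleNamespace(x=3, y=4), from_attributes=True)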

model_validate_json classmethod

model_validate_json(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given JSON data against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given JSON data against the model.

    Args:
        json_data: The JSON data to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )
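
For example, validating a JSON document directly (same hypothetical Point model and assumed import path as above):

from plateforme.core.schema.models import BaseModel  # assumed import path

class Point(BaseModel):
    x: int
    y: int

point = Point.model_validate_json('{"x": 1, "y": 2}')
# With strict=True, values that would need coercion are rejected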

model_validate_strings classmethod

model_validate_strings(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given string object against the model.

Parameters:

Name Type Description Default
obj Any

The string object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given string object against the model.

    Args:
        obj: The string object to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )
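
A sketch showing how string leaf values are coerced to the declared field types (hypothetical Event model, assumed import path):

from datetime import date

from plateforme.core.schema.models import BaseModel  # assumed import path

class Event(BaseModel):
    day: date
    seats: int

event = Event.model_validate_strings({'day': '2024-01-01', 'seats': '42'})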

copy

copy(
    *,
    include: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    exclude: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    update: Dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

Name Type Description Default
include AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to include in the copied model.

None
exclude AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to exclude in the copied model.

None
update Dict[str, Any] | None

Optional dictionary of field-value pairs to override field values in the copied model.

None
deep bool

If True, the values of fields that are Pydantic models will be deep-copied.

False

Returns:

Type Description
Model

A copy of the model with included, excluded and updated fields as specified.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@typing_extensions.deprecated(
    'The `copy` method is deprecated; use `model_copy` instead. '
    'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
    category=None,
)
def copy(
    self: Model,
    *,
    include: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
    update: typing.Dict[str, Any] | None = None,  # noqa UP006
    deep: bool = False,
) -> Model:  # pragma: no cover
    """Returns a copy of the model.

    !!! warning "Deprecated"
        This method is now deprecated; use `model_copy` instead.

    If you need `include` or `exclude`, use:

    ```py
    data = self.model_dump(include=include, exclude=exclude, round_trip=True)
    data = {**data, **(update or {})}
    copied = self.model_validate(data)
    ```

    Args:
        include: Optional set or mapping specifying which fields to include in the copied model.
        exclude: Optional set or mapping specifying which fields to exclude in the copied model.
        update: Optional dictionary of field-value pairs to override field values in the copied model.
        deep: If True, the values of fields that are Pydantic models will be deep-copied.

    Returns:
        A copy of the model with included, excluded and updated fields as specified.
    """
    warnings.warn(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=PydanticDeprecatedSince20,
    )
    from .deprecated import copy_internals

    values = dict(
        copy_internals._iter(
            self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
        ),
        **(update or {}),
    )
    if self.__pydantic_private__ is None:
        private = None
    else:
        private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

    if self.__pydantic_extra__ is None:
        extra: dict[str, Any] | None = None
    else:
        extra = self.__pydantic_extra__.copy()
        for k in list(self.__pydantic_extra__):
            if k not in values:  # k was in the exclude
                extra.pop(k)
        for k in list(values):
            if k in self.__pydantic_extra__:  # k must have come from extra
                extra[k] = values.pop(k)

    # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
    if update:
        fields_set = self.__pydantic_fields_set__ | update.keys()
    else:
        fields_set = set(self.__pydantic_fields_set__)

    # removing excluded fields from `__pydantic_fields_set__`
    if exclude:
        fields_set -= set(exclude)

    return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

model_adapter

model_adapter() -> TypeAdapterList[BaseModel]

Get the model type adapter.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classproperty
def model_adapter(cls) -> TypeAdapterList['BaseModel']:
    """Get the model type adapter."""
    if not hasattr(cls, '__pydantic_adapter__'):
        raise AttributeError(
            "The model type adapter is not defined. This may be due to "
            "the model not being fully built or an error occurred during "
            "model construction."
        )
    return cls.__pydantic_adapter__

model_revalidate

model_revalidate(
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None

Revalidate the model instance.

It revalidates the model instance in place, enforcing the types strictly if specified. If the model instance has already been validated, it will not be revalidated unless the force argument is set to True.

Parameters:

Name Type Description Default
force bool

Whether to force the revalidation of the model instance. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Raises:

Type Description
ValidationError

If the model instance could not be validated and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the model instance is already "validated" and revalidation was not required. If validation was required, returns True if validation was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_revalidate(
    self,
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None:
    """Revalidate the model instance.

    It revalidates the model instance in place, enforcing the types
    strictly if specified. If the model instance has already been
    validated, it will not be revalidated unless the `force` argument is
    set to ``True``.

    Args:
        force: Whether to force the revalidation of the model instance.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Raises:
        ValidationError: If the model instance could not be validated and
            `raise_errors` is set to ``True``.

    Returns:
        Returns ``None`` if the model instance is already "validated" and
        revalidation was not required. If validation was required, returns
        ``True`` if validation was successful, otherwise ``False`` if an
        error occurred and `raise_errors` is set to ``False``.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    if not force and self.__pydantic_validated__:
        return None
    else:
        try:
            self.__pydantic_validated__ = False
            self.__pydantic_validator__.validate_python(
                self,
                strict=strict,
                from_attributes=True,
                context=context,
                self_instance=self,
            )
        except Exception as error:
            if raise_errors:
                raise error
            return False
        return True
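
An illustrative sketch, forcing an in-place validation pass on an instance built from trusted data (hypothetical Item model, assumed import path):

from plateforme.core.schema.models import BaseModel  # assumed import path

class Item(BaseModel):
    name: str
    price: float

item = Item.model_construct(name='pen', price=1.5)
# Force revalidation; True on success, False on silent failure
ok = item.model_revalidate(force=True, raise_errors=False)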

model_update

model_update(
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None

Update the model with the given object and update dictionary.

Parameters:

Name Type Description Default
obj Any

The object to update the model with. It can be a dictionary or an object with attributes (if from_attributes is set to True). If it is a dictionary, the keys must match the model field names if extra fields are not allowed.

required
update dict[str, Any] | None

Values to add/modify within the model. Note that if assignment validation is not set to True, the integrity of the data is not validated when updating the model. Data should be trusted or pre-validated in this case. Defaults to None.

None
from_attributes bool | None

Whether to extract data from object attributes. Defaults to None.

None

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_update(
    self,
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None:
    """Update the model with the given object and update dictionary.

    Args:
        obj: The object to update the model with. It can be a dictionary
            or an object with attributes (if `from_attributes` is set to
            ``True``). If it is a dictionary, the keys must match the model
            field names if extra fields are not allowed.
        update: Values to add/modify within the model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when updating the model. Data should
            be trusted or pre-validated in this case. Defaults to ``None``.
        from_attributes: Whether to extract data from object attributes.
            Defaults to ``None``.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    # Collect update
    update = (update or {}).copy()
    if from_attributes:
        for field_name in self.model_fields:
            if hasattr(obj, field_name):
                update.setdefault(field_name, getattr(obj, field_name))
    elif isinstance(obj, dict):
        update = {**obj, **update}

    # Process update
    for key, value in update.items():
        if key in self.model_fields:
            self.__dict__[key] = value
        else:
            if self.model_config.extra == 'allow':
                if self.__pydantic_extra__ is None:
                    self.__pydantic_extra__ = {}
                self.__pydantic_extra__[key] = value
            elif self.model_config.extra == 'ignore':
                self.__dict__[key] = value
            else:
                raise ValueError(
                    f"Extra field {key!r} is not permitted on the "
                    f"model {self.__class__.__qualname__!r}."
                )

    # Update fields set
    self.__pydantic_fields_set__.update(update.keys())
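
For instance, merging values from a mapping together with explicit overrides, in place (hypothetical Profile model, assumed import path):

from plateforme.core.schema.models import BaseModel  # assumed import path

class Profile(BaseModel):
    name: str
    bio: str = ''

profile = Profile(name='Alice')
profile.model_update({'bio': 'Painter'}, update={'name': 'Alice B.'})
print(profile.name, profile.bio)  # Alice B. Painter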

model_validate_many classmethod

model_validate_many(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given object collection against the model.

Parameters:

Name Type Description Default
obj Any

The object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object collection items attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given object collection against the model.

    Args:
        obj: The object collection to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            collection items attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
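
A sketch validating a whole collection in one call through the model type adapter (hypothetical Tag model, assumed import path):

from plateforme.core.schema.models import BaseModel  # assumed import path

class Tag(BaseModel):
    label: str

tags = Tag.model_validate_many([{'label': 'draft'}, {'label': 'urgent'}])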

model_validate_json_many classmethod

model_validate_json_many(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given JSON data collection against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json_many(
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given JSON data collection against the model.

    Args:
        json_data: The JSON data collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )
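
The JSON counterpart accepts a JSON array of items. Same hypothetical Tag model and import assumption:

from plateforme.core.schema.models import BaseModel  # assumed import path

class Tag(BaseModel):
    label: str

tags = Tag.model_validate_json_many('[{"label": "draft"}, {"label": "urgent"}]')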

model_validate_strings_many classmethod

model_validate_strings_many(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given string object collection against the model.

Parameters:

Name Type Description Default
obj Any

The string object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given string object collection against the model.

    Args:
        obj: The string object collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

ProjectInfo

ProjectInfo(**data: Any)

Bases: BaseModel

The project information model.

The project information is resolved from either a config.toml or pyproject.toml configuration file. For the latter, it merges the standard Python project metadata with the [tool.plateforme] specific configuration. The Python standard fields used are:

- name (required): The project name.
- version (required): The project version.
- authors: The authors of the project.
- description: A short description of the project.
- keywords: A list of keywords that describe the project.
- license: The license information for the project.
- maintainers: The maintainers of the project.
- readme: The path to the project readme file or a string with the project readme content.

For more information, see also: https://packaging.python.org/en/latest/specifications/pyproject-toml

Initialize a model instance.

It initializes a model instance by parsing and validating input data from the data keyword arguments.

Parameters:

Name Type Description Default
**data Any

The input data to initialize the model instance.

{}

Raises:

Type Description
ValidationError

If the object could not be validated.

Note

The argument self is explicitly positional-only to allow self as a field name and data keyword argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def __init__(self, /, **data: Any) -> None:
    """Initialize a model instance.

    It initializes a model instance by parsing and validating input data
    from the `data` keyword arguments.

    Args:
        **data: The input data to initialize the model instance.

    Raises:
        ValidationError: If the object could not be validated.

    Note:
        The argument ``self`` is explicitly positional-only to allow
        ``self`` as a field name and data keyword argument.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    self.__pydantic_validator__.validate_python(data, self_instance=self)
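
An illustrative sketch, constructing the information model directly from keyword data; the import path and the exact field set are assumptions based on the description above:

from plateforme.core.main import ProjectInfo  # assumed import path

info = ProjectInfo(
    name='my-project',   # required
    version='0.1.0',     # required
    description='Demo project',
)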

model_extra property

model_extra: dict[str, Any] | None

Get extra fields set during validation.

Returns:

Type Description
dict[str, Any] | None

A dictionary of extra fields, or None if config.extra is not set to "allow".

model_fields_set property

model_fields_set: set[str]

Returns the set of fields that have been explicitly set on this model instance.

Returns:

Type Description
set[str]

A set of strings representing the fields that have been set, i.e. that were not filled from defaults.

model_construct classmethod

model_construct(
    _fields_set: set[str] | None = None, **data: Any
) -> Model

Creates a new instance of the model class with validated data.

Creates a new model setting __dict__ and __pydantic_fields_set__ from trusted or pre-validated data. Default values are respected, but no other validation is performed. It behaves as if model_config.extra = 'allow' was set since it adds all passed values.

Parameters:

Name Type Description Default
_fields_set set[str] | None

The set of field names accepted by the model instance.

None
**data Any

Trusted or pre-validated input data to initialize the model. It is used to set the __dict__ attribute of the model.

{}

Returns:

Type Description
Model

A new instance of the model class with validated data.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_construct(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    _fields_set: set[str] | None = None,
    **data: Any,
) -> Model:
    """Creates a new instance of the model class with validated data.

    Creates a new model setting `__dict__` and `__pydantic_fields_set__`
    from trusted or pre-validated data. Default values are respected, but
    no other validation is performed. It behaves as if
    `model_config.extra = 'allow'` was set since it adds all passed values.

    Args:
        _fields_set: The set of field names accepted by the model instance.
        **data: Trusted or pre-validated input data to initialize the
            model. It is used to set the `__dict__` attribute of the model.

    Returns:
        A new instance of the model class with validated data.
    """
    model = super().model_construct(_fields_set, **data)

    # Remove default initialization of instrumented resource fields, as
    # they are not needed when constructing a resource instance directly,
    # i.e. defaults are already set and stored in the database.
    if cls.__pydantic_owner__ == 'resource':
        resource = cls.__pydantic_resource__
        for name in getattr(resource, 'resource_attributes'):
            if _fields_set and name in _fields_set:
                continue
            model.__dict__.pop(name, None)

    return model
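
A sketch building an instance from trusted data without validation; defaults are still applied (assumed import path):

from plateforme.core.main import ProjectInfo  # assumed import path

info = ProjectInfo.model_construct(name='my-project', version='0.1.0')
print(info.name)  # my-project, set without running validation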

model_copy

model_copy(
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Parameters:

Name Type Description Default
update dict[str, Any] | None

Values to add/modify within the new model. Note that if assignment validation is not set to True, the integrity of the data is not validated when creating the new model. Data should be trusted or pre-validated in this case.

None
deep bool

Set to True to make a deep copy of the model.

False

Returns:

Type Description
Model

A new copy of the model instance with the updated values.

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_copy(  # type: ignore[override, unused-ignore]
    self: Model,
    *,
    update: dict[str, Any] | None = None,
    deep: bool = False,
) -> Model:
    """Returns a copy of the model.

    Args:
        update: Values to add/modify within the new model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when creating the new model. Data
            should be trusted or pre-validated in this case.
        deep: Set to ``True`` to make a deep copy of the model.

    Returns:
        A new copy of the model instance with the updated values.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    copied = self.__deepcopy__() if deep else self.__copy__()
    if update:
        copied.model_update(update, from_attributes=False)
    return copied
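
For example, copying an instance with overrides; note that values passed through update are not validated here (assumed import path):

from plateforme.core.main import ProjectInfo  # assumed import path

info = ProjectInfo(name='my-project', version='0.1.0')
bumped = info.model_copy(update={'version': '0.2.0'})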

model_dump

model_dump(
    *,
    mode: Literal["json", "python", "raw"] | str = "python",
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]

Generate a dictionary representation of the model.

It is used to dump the model instance to a dictionary representation of the model, optionally specifying which fields to include or exclude.

Parameters:

Name Type Description Default
mode Literal['json', 'python', 'raw'] | str

The mode in which to_python should run. If mode is json, the output will only contain JSON serializable types; if mode is python, the output may contain non-JSON-serializable Python objects; if mode is raw, the output will contain raw field values. Defaults to python.

'python'
include IncEx | None

A list of fields to include in the output. Defaults to None.

None
exclude IncEx | None

A list of fields to exclude from the output. Defaults to None.

None
by_alias bool

Whether to use the field's alias in the dictionary key if defined. Defaults to False.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set. Defaults to False.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value. Defaults to False.

False
exclude_none bool

Whether to exclude fields that have a value of None. Defaults to False.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T]. Defaults to False.

False
warnings bool

Whether to log warnings when invalid fields are encountered. Defaults to True.

True

Returns:

Type Description
dict[str, Any]

A dictionary representation of the model.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_dump(  # type: ignore[override, unused-ignore]
    self,
    *,
    mode: Literal['json', 'python', 'raw'] | str = 'python',
    include: IncEx | None = None,
    exclude: IncEx | None = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> dict[str, Any]:
    """Generate a dictionary representation of the model.

    It is used to dump the model instance to a dictionary representation of
    the model, optionally specifying which fields to include or exclude.

    Args:
        mode: The mode in which `to_python` should run:
            - If mode is ``json``, the output will only contain JSON
                serializable types.
            - If mode is ``python``, the output may contain non JSON
                serializable Python objects.
            - If mode is ``raw``, the output will contain raw values.
            Defaults to ``python``.
        include: A list of fields to include in the output.
            Defaults to ``None``.
        exclude: A list of fields to exclude from the output.
            Defaults to ``None``.
        by_alias: Whether to use the field's alias in the dictionary key if
            defined. Defaults to ``False``.
        exclude_unset: Whether to exclude fields that have not been
            explicitly set. Defaults to ``False``.
        exclude_defaults: Whether to exclude fields that are set to their
            default value. Defaults to ``False``.
        exclude_none: Whether to exclude fields that have a value of
            ``None``. Defaults to ``False``.
        round_trip: If ``True``, dumped values should be valid as input for
            non-idempotent types such as `Json[T]`. Defaults to ``False``.
        warnings: Whether to log warnings when invalid fields are
            encountered. Defaults to ``True``.

    Returns:
        A dictionary representation of the model.
    """
    if mode != 'raw':
        return self.__pydantic_serializer__.to_python(  # type: ignore
            self,
            mode=mode,
            by_alias=by_alias,
            include=include,  # type: ignore
            exclude=exclude,  # type: ignore
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
        )

    # Handle raw mode
    result: dict[str, Any] = {}
    for field_name, field_info in self.model_fields.items():
        if not hasattr(self, field_name):
            continue
        value = getattr(self, field_name)
        # Skip excluded fields
        if include is not None and field_name not in include:
            continue
        if exclude is not None and field_name in exclude:
            continue
        if exclude_unset and field_name not in self.model_fields_set:
            continue
        if exclude_defaults and value == field_info.default:
            continue
        if exclude_none and value is None:
            continue
        # Add field value
        if by_alias and field_info.alias:
            result[field_info.alias] = value
        else:
            result[field_name] = value
    return result
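
An illustrative sketch contrasting the dump modes (assumed import path):

from plateforme.core.main import ProjectInfo  # assumed import path

info = ProjectInfo(name='my-project', version='0.1.0')
info.model_dump()                                # 'python' mode (default)
info.model_dump(mode='json')                     # JSON-serializable values only
info.model_dump(mode='raw', exclude_none=True)   # raw field values, drop None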

model_dump_json

model_dump_json(
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str

Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

Generates a JSON representation of the model using Pydantic's to_json method.

Parameters:

Name Type Description Default
indent int | None

Indentation to use in the JSON output. If None is passed, the output will be compact.

None
include IncEx

Field(s) to include in the JSON output.

None
exclude IncEx

Field(s) to exclude from the JSON output.

None
by_alias bool

Whether to serialize using field aliases.

False
exclude_unset bool

Whether to exclude fields that have not been explicitly set.

False
exclude_defaults bool

Whether to exclude fields that are set to their default value.

False
exclude_none bool

Whether to exclude fields that have a value of None.

False
round_trip bool

If True, dumped values should be valid as input for non-idempotent types such as Json[T].

False
warnings bool

Whether to log warnings when invalid fields are encountered.

True

Returns:

Type Description
str

A JSON string representation of the model.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
def model_dump_json(
    self,
    *,
    indent: int | None = None,
    include: IncEx = None,
    exclude: IncEx = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    round_trip: bool = False,
    warnings: bool = True,
) -> str:
    """Usage docs: https://docs.pydantic.dev/2.6/concepts/serialization/#modelmodel_dump_json

    Generates a JSON representation of the model using Pydantic's `to_json` method.

    Args:
        indent: Indentation to use in the JSON output. If None is passed, the output will be compact.
        include: Field(s) to include in the JSON output.
        exclude: Field(s) to exclude from the JSON output.
        by_alias: Whether to serialize using field aliases.
        exclude_unset: Whether to exclude fields that have not been explicitly set.
        exclude_defaults: Whether to exclude fields that are set to their default value.
        exclude_none: Whether to exclude fields that have a value of `None`.
        round_trip: If True, dumped values should be valid as input for non-idempotent types such as Json[T].
        warnings: Whether to log warnings when invalid fields are encountered.

    Returns:
        A JSON string representation of the model.
    """
    return self.__pydantic_serializer__.to_json(
        self,
        indent=indent,
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=warnings,
    ).decode()
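
For example, producing an indented JSON string while omitting unset fields (assumed import path):

from plateforme.core.main import ProjectInfo  # assumed import path

info = ProjectInfo(name='my-project', version='0.1.0')
print(info.model_dump_json(indent=2, exclude_unset=True))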

model_json_schema classmethod

model_json_schema(
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[
        GenerateJsonSchema
    ] = GenerateJsonSchema,
    mode: JsonSchemaMode = "validation",
    source: JsonSchemaSource = "model",
) -> dict[str, Any]

Generates a JSON schema for a model class.

Parameters:

Name Type Description Default
by_alias bool

Whether to use field aliases when generating the schema, i.e. if True, fields will be serialized according to their alias, otherwise according to their attribute name. Defaults to True.

True
ref_template str

The template format string used when generating reference names. Defaults to DEFAULT_REF_TEMPLATE.

DEFAULT_REF_TEMPLATE
schema_generator type[GenerateJsonSchema]

The class to use for generating the JSON Schema.

GenerateJsonSchema
mode JsonSchemaMode

The mode to use for generating the JSON Schema. It can be either validation or serialization where respectively the schema is generated for validating data or serializing data. Defaults to validation.

'validation'
source JsonSchemaSource

The source type to use for generating the resources JSON schema. It can be either key, model, or both, where the latter accepts, when applicable, integer and string values for key identifiers in addition to the standard model schema generation. Defaults to model.

'model'

Returns:

Type Description
dict[str, Any]

The generated JSON schema of the model class.

Note

The schema generator class can be overridden to customize the logic used to generate the JSON schema. This can be done by subclassing the GenerateJsonSchema class and passing the subclass as the schema_generator argument.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_json_schema(  # type: ignore[override, unused-ignore]
    cls,
    by_alias: bool = True,
    ref_template: str = DEFAULT_REF_TEMPLATE,
    schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
    mode: JsonSchemaMode = 'validation',
    source: JsonSchemaSource = 'model',
) -> dict[str, Any]:
    """Generates a JSON schema for a model class.

    Args:
        by_alias: Whether to use field aliases when generating the schema,
            i.e. if ``True``, fields will be serialized according to their
            alias, otherwise according to their attribute name.
            Defaults to ``True``.
        ref_template: The template format string used when generating
            reference names. Defaults to ``DEFAULT_REF_TEMPLATE``.
        schema_generator: The class to use for generating the JSON Schema.
        mode: The mode to use for generating the JSON Schema. It can be
            either ``validation`` or ``serialization`` where respectively
            the schema is generated for validating data or serializing
            data. Defaults to ``validation``.
        source: The source type to use for generating the resources JSON
            schema. It can be either ``key`` , ``model``, or ``both`` where
            the latter accepts, when applicable, integer and string values
            for key identifiers in addition to the standard model schema
            generation. Defaults to ``model``.

    Returns:
        The generated JSON schema of the model class.

    Note:
        The schema generator class can be overridden to customize the
        logic used to generate the JSON schema. This can be done by
        subclassing the `GenerateJsonSchema` class and passing the subclass
        as the `schema_generator` argument.
    """
    schema_generator_instance = schema_generator(
        by_alias=by_alias, ref_template=ref_template
    )
    if isinstance(cls.__pydantic_validator__, _mock_val_ser.MockValSer):
        cls.__pydantic_validator__.rebuild()
    return schema_generator_instance.generate(
        cls.__pydantic_core_schema__, mode=mode, source=source
    )
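
A sketch generating and printing the serialization schema of a model class (assumed import path):

import json

from plateforme.core.main import ProjectInfo  # assumed import path

schema = ProjectInfo.model_json_schema(mode='serialization')
print(json.dumps(schema, indent=2))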

model_parametrized_name classmethod

model_parametrized_name(
    params: tuple[type[Any], ...],
) -> str

Compute the class name for parametrizations of generic classes.

This method can be overridden to achieve a custom naming scheme for generic BaseModels.

Parameters:

Name Type Description Default
params tuple[type[Any], ...]

Tuple of types of the class. Given a generic class Model with 2 type variables and a concrete model Model[str, int], the value (str, int) would be passed to params.

required

Returns:

Type Description
str

String representing the new class where params are passed to cls as type variables.

Raises:

Type Description
TypeError

Raised when trying to generate concrete names for non-generic models.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@classmethod
def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
    """Compute the class name for parametrizations of generic classes.

    This method can be overridden to achieve a custom naming scheme for generic BaseModels.

    Args:
        params: Tuple of types of the class. Given a generic class
            `Model` with 2 type variables and a concrete model `Model[str, int]`,
            the value `(str, int)` would be passed to `params`.

    Returns:
        String representing the new class where `params` are passed to `cls` as type variables.

    Raises:
        TypeError: Raised when trying to generate concrete names for non-generic models.
    """
    if not issubclass(cls, typing.Generic):
        raise TypeError('Concrete names should only be generated for generic models.')

    # Any strings received should represent forward references, so we handle them specially below.
    # If we eventually move toward wrapping them in a ForwardRef in __class_getitem__ in the future,
    # we may be able to remove this special case.
    param_names = [param if isinstance(param, str) else _repr.display_as_type(param) for param in params]
    params_component = ', '.join(param_names)
    return f'{cls.__name__}[{params_component}]'

model_post_init

model_post_init(__context: Any) -> None

Post-initialization method for the model class.

Override this method to perform additional initialization after the __init__ and model_construct methods have been called. This is useful in scenarios where it is necessary to perform additional initialization steps after the model has been fully initialized.

Parameters:

Name Type Description Default
__context Any

The context object passed to the model instance.

required
Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_post_init(self, __context: Any) -> None:
    """Post-initialization method for the model class.

    Override this method to perform additional initialization after the
    `__init__` and `model_construct` methods have been called. This is
    useful in scenarios where it is necessary to perform additional
    initialization steps after the model has been fully initialized.

    Args:
        __context: The context object passed to the model instance.
    """
    ...

model_rebuild classmethod

model_rebuild(
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None

Try to rebuild the pydantic-core schema for the model.

This may be necessary when one of the annotations is a ForwardRef which could not be resolved during the initial attempt to build the schema, and automatic rebuilding fails.

Parameters:

Name Type Description Default
force bool

Whether to force the rebuilding of the model schema. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
_parent_namespace_depth int

The depth level of the parent namespace. Defaults to 2.

2
_types_namespace dict[str, Any] | None

The types namespace. Defaults to None.

None

Raises:

Type Description
PlateformeError

If an error occurred while rebuilding the model adapter and raise_errors is set to True.

PydanticUndefinedAnnotation

If PydanticUndefinedAnnotation occurs in __get_pydantic_core_schema__ and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the schema is already "complete" and rebuilding was not required. If rebuilding was required, returns True if rebuilding was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_rebuild(  # type: ignore[override, unused-ignore]
    cls,
    *,
    force: bool = False,
    raise_errors: bool = True,
    _parent_namespace_depth: int = 2,
    _types_namespace: dict[str, Any] | None = None,
) -> bool | None:
    """Try to rebuild the pydantic-core schema for the model.

    This may be necessary when one of the annotations is a `ForwardRef`
    which could not be resolved during the initial attempt to build the
    schema, and automatic rebuilding fails.

    Args:
        force: Whether to force the rebuilding of the model schema.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        _parent_namespace_depth: The depth level of the parent namespace.
            Defaults to 2.
        _types_namespace: The types namespace. Defaults to ``None``.

    Raises:
        PlateformeError: If an error occurred while rebuilding the model
            adapter and `raise_errors` is set to ``True``.
        PydanticUndefinedAnnotation: If `PydanticUndefinedAnnotation`
            occurs in`__get_pydantic_core_schema__` and `raise_errors` is
            set to ``True``.

    Returns:
        Returns ``None`` if the schema is already "complete" and rebuilding
        was not required. If rebuilding was required, returns ``True`` if
        rebuilding was successful, otherwise ``False`` if an error
        occurred and `raise_errors` is set to ``False``.
    """
    build_status: bool | None = None

    # Rebuild model
    build_status = super().model_rebuild(
        force=build_status or force,
        raise_errors=raise_errors,
        _parent_namespace_depth=_parent_namespace_depth,
        _types_namespace=_types_namespace,
    )

    # Rebuild model adapter
    if build_status:
        try:
            adapter = TypeAdapterList(cls)
            setattr(cls, '__pydantic_adapter__', adapter)
        except Exception as error:
            if not raise_errors:
                return False
            raise PlateformeError(
                f"Failed to rebuild model adapter for {cls.__name__!r}.",
                code='model-build-failed',
            )

    if build_status is not False:
        cls.model_config.pop('defer_build')

    return build_status

model_validate classmethod

model_validate(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given object against the model.

Parameters:

Name Type Description Default
obj Any

The object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given object against the model.

    Args:
        obj: The object to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )

model_validate_json classmethod

model_validate_json(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given JSON data against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given JSON data against the model.

    Args:
        json_data: The JSON data to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )

model_validate_strings classmethod

model_validate_strings(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model

Validate the given string object against the model.

Parameters:

Name Type Description Default
obj Any

The string object to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Model

A validated model instance.

Raises:

Type Description
ValidationError

If the object could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings(  # type: ignore[override, unused-ignore]
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Model:
    """Validate the given string object against the model.

    Args:
        obj: The string object to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated model instance.

    Raises:
        ValidationError: If the object could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.__pydantic_validator__.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )

copy

copy(
    *,
    include: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    exclude: AbstractSetIntStr
    | MappingIntStrAny
    | None = None,
    update: Dict[str, Any] | None = None,
    deep: bool = False,
) -> Model

Returns a copy of the model.

Deprecated

This method is now deprecated; use model_copy instead.

If you need include or exclude, use:

data = self.model_dump(include=include, exclude=exclude, round_trip=True)
data = {**data, **(update or {})}
copied = self.model_validate(data)

Parameters:

Name Type Description Default
include AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to include in the copied model.

None
exclude AbstractSetIntStr | MappingIntStrAny | None

Optional set or mapping specifying which fields to exclude in the copied model.

None
update Dict[str, Any] | None

Optional dictionary of field-value pairs to override field values in the copied model.

None
deep bool

If True, the values of fields that are Pydantic models will be deep-copied.

False

Returns:

Type Description
Model

A copy of the model with included, excluded and updated fields as specified.

Source code in .venv/lib/python3.12/site-packages/pydantic/main.py
@typing_extensions.deprecated(
    'The `copy` method is deprecated; use `model_copy` instead. '
    'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
    category=None,
)
def copy(
    self: Model,
    *,
    include: AbstractSetIntStr | MappingIntStrAny | None = None,
    exclude: AbstractSetIntStr | MappingIntStrAny | None = None,
    update: typing.Dict[str, Any] | None = None,  # noqa UP006
    deep: bool = False,
) -> Model:  # pragma: no cover
    """Returns a copy of the model.

    !!! warning "Deprecated"
        This method is now deprecated; use `model_copy` instead.

    If you need `include` or `exclude`, use:

    ```py
    data = self.model_dump(include=include, exclude=exclude, round_trip=True)
    data = {**data, **(update or {})}
    copied = self.model_validate(data)
    ```

    Args:
        include: Optional set or mapping specifying which fields to include in the copied model.
        exclude: Optional set or mapping specifying which fields to exclude in the copied model.
        update: Optional dictionary of field-value pairs to override field values in the copied model.
        deep: If True, the values of fields that are Pydantic models will be deep-copied.

    Returns:
        A copy of the model with included, excluded and updated fields as specified.
    """
    warnings.warn(
        'The `copy` method is deprecated; use `model_copy` instead. '
        'See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`.',
        category=PydanticDeprecatedSince20,
    )
    from .deprecated import copy_internals

    values = dict(
        copy_internals._iter(
            self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
        ),
        **(update or {}),
    )
    if self.__pydantic_private__ is None:
        private = None
    else:
        private = {k: v for k, v in self.__pydantic_private__.items() if v is not PydanticUndefined}

    if self.__pydantic_extra__ is None:
        extra: dict[str, Any] | None = None
    else:
        extra = self.__pydantic_extra__.copy()
        for k in list(self.__pydantic_extra__):
            if k not in values:  # k was in the exclude
                extra.pop(k)
        for k in list(values):
            if k in self.__pydantic_extra__:  # k must have come from extra
                extra[k] = values.pop(k)

    # new `__pydantic_fields_set__` can have unset optional fields with a set value in `update` kwarg
    if update:
        fields_set = self.__pydantic_fields_set__ | update.keys()
    else:
        fields_set = set(self.__pydantic_fields_set__)

    # removing excluded fields from `__pydantic_fields_set__`
    if exclude:
        fields_set -= set(exclude)

    return copy_internals._copy_and_set_values(self, values, fields_set, extra, private, deep=deep)

model_adapter

model_adapter() -> TypeAdapterList[BaseModel]

Get the model type adapter.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classproperty
def model_adapter(cls) -> TypeAdapterList['BaseModel']:
    """Get the model type adapter."""
    if not hasattr(cls, '__pydantic_adapter__'):
        raise AttributeError(
            "The model type adapter is not defined. This may be due to "
            "the model not being fully built or an error occurred during "
            "model construction."
        )
    return cls.__pydantic_adapter__

model_revalidate

model_revalidate(
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None

Revalidate the model instance.

It revalidates the model instance in place, enforcing the types strictly if specified. If the model instance has already been validated, it will not be revalidated unless the force argument is set to True.

Parameters:

Name Type Description Default
force bool

Whether to force the revalidation of the model instance. Defaults to False.

False
raise_errors bool

Whether to raise errors or fail silently. Defaults to True.

True
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Raises:

Type Description
ValidationError

If the model instance could not be validated and raise_errors is set to True.

Returns:

Type Description
bool | None

Returns None if the model instance is already "validated" and revalidation was not required. If validation was required, returns True if validation was successful, otherwise False if an error occurred and raise_errors is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_revalidate(
    self,
    *,
    force: bool = False,
    raise_errors: bool = True,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> bool | None:
    """Revalidate the model instance.

    It revalidates the model instance in place, enforcing the types
    strictly if specified. If the model instance has already been
    validated, it will not be revalidated unless the `force` argument is
    set to ``True``.

    Args:
        force: Whether to force the revalidation of the model instance.
            Defaults to ``False``.
        raise_errors: Whether to raise errors or fail silently.
            Defaults to ``True``.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Raises:
        ValidationError: If the model instance could not be validated and
            `raise_errors` is set to ``True``.

    Returns:
        Returns ``None`` if the model instance is already "validated" and
        revalidation was not required. If validation was required, returns
        ``True`` if validation was successful, otherwise ``False`` if an
        error occurred and `raise_errors` is set to ``False``.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    if not force and self.__pydantic_validated__:
        return None
    else:
        try:
            self.__pydantic_validated__ = False
            self.__pydantic_validator__.validate_python(
                self,
                strict=strict,
                from_attributes=True,
                context=context,
                self_instance=self,
            )
        except Exception as error:
            if raise_errors:
                raise error
            return False
        return True
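
A minimal usage sketch of the tri-state return value, assuming a plateforme `BaseModel` subclass (the `Point` model and import path are illustrative):

```py
from plateforme import BaseModel  # assumed import path

class Point(BaseModel):
    x: int
    y: int

point = Point(x=1, y=2)

# Already validated at construction, so nothing happens and None is returned.
point.model_revalidate()

# Bypass validation by writing to __dict__, then force a re-check without
# raising: the call is expected to return False because 'oops' is not an int.
point.__dict__['x'] = 'oops'
point.model_revalidate(force=True, raise_errors=False)
```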

model_update

model_update(
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None

Update the model with the given object and update dictionary.

Parameters:

Name Type Description Default
obj Any

The object to update the model with. It can be a dictionary or an object with attributes (if from_attributes is set to True). If it is a dictionary, the keys must match the model field names if extra fields are not allowed.

required
update dict[str, Any] | None

Values to add/modify within the model. Note that if assignment validation is not set to True, the integrity of the data is not validated when updating the model. Data should be trusted or pre-validated in this case. Defaults to None.

None
from_attributes bool | None

Whether to extract data from object attributes. Defaults to None.

None

Raises:

Type Description
ValidationError

If the object could not be validated.

ValueError

If strict or context are set when validate_assignment is set to False.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
def model_update(
    self,
    obj: Any,
    *,
    update: dict[str, Any] | None = None,
    from_attributes: bool | None = None,
) -> None:
    """Update the model with the given object and update dictionary.

    Args:
        obj: The object to update the model with. It can be a dictionary
            or an object with attributes (if `from_attributes` is set to
            ``True``). If it is a dictionary, the keys must match the model
            field names if extra fields are not allowed.
        update: Values to add/modify within the model. Note that if
            assignment validation is not set to ``True``, the integrity of
            the data is not validated when updating the model. Data should
            be trusted or pre-validated in this case. Defaults to ``None``.
        from_attributes: Whether to extract data from object attributes.
            Defaults to ``None``.

    Raises:
        ValidationError: If the object could not be validated.
        ValueError: If `strict` or `context` are set when
            `validate_assignment` is set to ``False``.
    """
    # Collect update
    update = (update or {}).copy()
    if from_attributes:
        for field_name in self.model_fields:
            if hasattr(obj, field_name):
                update.setdefault(field_name, getattr(obj, field_name))
    elif isinstance(obj, dict):
        update = {**obj, **update}

    # Process update
    for key, value in update.items():
        if key in self.model_fields:
            self.__dict__[key] = value
        else:
            if self.model_config.extra == 'allow':
                if self.__pydantic_extra__ is None:
                    self.__pydantic_extra__ = {}
                self.__pydantic_extra__[key] = value
            elif self.model_config.extra == 'ignore':
                self.__dict__[key] = value
            else:
                raise ValueError(
                    f"Extra field {key!r} is not permitted on the "
                    f"model {self.__class__.__qualname__!r}."
                )

    # Update fields set
    self.__pydantic_fields_set__.update(update.keys())
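
A minimal sketch of both update paths, a plain dictionary merge and attribute extraction, assuming a plateforme `BaseModel` subclass (names are illustrative). Note the caveat above: without `validate_assignment`, the merged values are not re-validated.

```py
from plateforme import BaseModel  # assumed import path

class Account(BaseModel):
    owner: str
    balance: float = 0.0

account = Account(owner='alice')

# Merge a plain dict, with explicit overrides taking precedence via `update`.
account.model_update({'balance': 10.0}, update={'owner': 'bob'})

# Alternatively, pull matching attributes from an arbitrary object.
class Snapshot:
    owner = 'carol'
    balance = 25.0

account.model_update(Snapshot(), from_attributes=True)
```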

model_validate_many classmethod

model_validate_many(
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given object collection against the model.

Parameters:

Name Type Description Default
obj Any

The object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
from_attributes bool | None

Whether to extract data from the object collection items' attributes.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    from_attributes: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given object collection against the model.

    Args:
        obj: The object collection to validate.
        strict: Whether to enforce types strictly.
        from_attributes: Whether to extract data from the object
            collection items attributes.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_python(  # type: ignore
        obj,
        strict=strict,
        from_attributes=from_attributes,
        context=context,
    )
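
A minimal sketch, assuming a plateforme `BaseModel` subclass (the `Tag` model and import path are illustrative):

```py
from plateforme import BaseModel  # assumed import path

class Tag(BaseModel):
    label: str

# Validates a whole collection in one call and returns a sequence of
# validated `Tag` instances.
tags = Tag.model_validate_many([{'label': 'draft'}, {'label': 'published'}])
```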

model_validate_json_many classmethod

model_validate_json_many(
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given JSON data collection against the model.

Parameters:

Name Type Description Default
json_data str | bytes | bytearray

The JSON data collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValueError

If json_data is not a JSON string.

ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_json_many(
    cls: type[Model],
    json_data: str | bytes | bytearray,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given JSON data collection against the model.

    Args:
        json_data: The JSON data collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValueError: If `json_data` is not a JSON string.
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_json(  # type: ignore
        json_data, strict=strict, context=context
    )
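
A minimal sketch, assuming a plateforme `BaseModel` subclass; since the adapter targets a collection, the payload is presumably a JSON array of objects:

```py
from plateforme import BaseModel  # assumed import path

class Tag(BaseModel):
    label: str

# Parse and validate a JSON array in one step.
tags = Tag.model_validate_json_many('[{"label": "draft"}, {"label": "final"}]')
```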

model_validate_strings_many classmethod

model_validate_strings_many(
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]

Validate the given string object collection against the model.

Parameters:

Name Type Description Default
obj Any

The string object collection to validate.

required
strict bool | None

Whether to enforce types strictly.

None
context dict[str, Any] | None

Extra variables to pass to the validator.

None

Returns:

Type Description
Sequence[Model]

A validated collection of model instances.

Raises:

Type Description
ValidationError

If the object collection could not be validated.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/schema/models.py
@classmethod
def model_validate_strings_many(
    cls: type[Model],
    obj: Any,
    *,
    strict: bool | None = None,
    context: dict[str, Any] | None = None,
) -> Sequence[Model]:
    """Validate the given string object collection against the model.

    Args:
        obj: The string object collection to validate.
        strict: Whether to enforce types strictly.
        context: Extra variables to pass to the validator.

    Returns:
        A validated collection of model instances.

    Raises:
        ValidationError: If the object collection could not be validated.
    """
    # Tell pytest to hide this function from tracebacks
    __tracebackhide__ = True

    return cls.model_adapter.validate_strings(  # type: ignore
        obj, strict=strict, context=context
    )
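
A minimal sketch, assuming a plateforme `BaseModel` subclass; string-mode validation coerces string values into the declared field types:

```py
from plateforme import BaseModel  # assumed import path

class Measurement(BaseModel):
    value: float
    valid: bool

# Every leaf value is a string; validation coerces them to float and bool.
measurements = Measurement.model_validate_strings_many(
    [{'value': '3.14', 'valid': 'true'}]
)
```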

import_project_info

import_project_info(
    dirname: str | None = None,
    *,
    force_resolution: bool = True,
) -> ProjectInfo

Import the project information from the given directory.

It imports the project information from either a config.toml or pyproject.toml configuration file found in the given path directory. The project configuration is then parsed and returned as a ProjectInfo instance.

Parameters:

Name Type Description Default
dirname str | None

The absolute or relative system path to search from. Defaults to current working directory.

None
force_resolution bool

Whether to search recursively up the directory tree if the file is not found in the given directory, until the root is reached or a valid file is found. Defaults to True.

True

Returns:

Type Description
ProjectInfo

The project information parsed from the project configuration file.

Raises:

Type Description
FileExistsError

If multiple project configuration files are found in the same directory.

FileNotFoundError

If no project configuration files are found in the given path directory.

ImportError

If the project configuration file cannot be parsed or has no valid project configuration entries.

NotImplementedError

If the project configuration file is not supported.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/projects.py
def import_project_info(
    dirname: str | None = None, *, force_resolution: bool = True,
) -> ProjectInfo:
    """Import the project information from the given directory.

    It imports the project information from either a ``config.toml`` or
    ``pyproject.toml`` configuration file found in the given path directory.
    The project configuration is then parsed and returned as a `ProjectInfo`
    instance.

    Args:
        dirname: The absolute or relative system path to search from.
            Defaults to current working directory.
        force_resolution: Whether to search recursively up the directory tree
            if the file is not found in the given directory, until the root is
            reached or a valid file is found. Defaults to ``True``.

    Returns:
        The project information parsed from the project configuration file.

    Raises:
        FileExistsError: If multiple project configuration files are found in
            the same directory.
        FileNotFoundError: If no project configuration files are
            found in the given path directory.
        ImportError: If the project configuration file cannot be parsed or has
            no valid project configuration entries.
        NotImplementedError: If the project configuration file is not
            supported.
    """
    project_path = resolve_project_path(
        dirname, force_resolution=force_resolution
    )

    if project_path is None:
        raise FileNotFoundError(
            f"No project configuration files found for the path: "
            f"{project_path or Path.cwd()}."
        )

    try:
        with open(project_path, 'rb') as project_file:
            project_data = tomllib.load(project_file)
    except tomllib.TOMLDecodeError as error:
        raise ImportError(
            f"Failed to parse project configuration file: {project_path}."
        ) from error

    if project_path.name == 'config.toml':
        project_config = project_data.get('plateforme', {})
    elif project_path.name == 'pyproject.toml':
        project_tool = project_data.get('tool', {})
        project_config = {
            **project_data.get('project', {}),
            **project_tool.get('plateforme', {}),
        }
    else:
        raise NotImplementedError(
            f"Unsupported project configuration file: {project_file}."
        )

    if not project_config:
        raise ImportError(
            f"No valid project configuration entries found in the TOML file: "
            f"{project_path}. Make sure to include either the '[project]' "
            f"section for 'pyproject.toml' files, or the '[plateforme]' "
            f"section for 'config.toml' files."
        )

    return ProjectInfo(**project_config, directory=project_path.parent)
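
A minimal usage sketch; the module path comes from the source header above, and the directory argument is illustrative:

```py
from plateforme.core.projects import import_project_info

# Search the current working directory, walking up parent directories until
# a config.toml or pyproject.toml is found.
info = import_project_info()
print(info.directory)

# Restrict the search to a single directory (no upward resolution).
info = import_project_info('/path/to/project', force_resolution=False)
```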

resolve_project_path

resolve_project_path(
    dirname: str | None = None,
    *,
    force_resolution: bool = True,
) -> Path | None

Find the project configuration file path within the given directory.

It searches for a valid config.toml or pyproject.toml file within the given directory. If no file is found in the provided directory and force_resolution is set to True, it will recursively look up the directory tree until the root is reached or a valid file is found. Otherwise, it will return None.

Parameters:

Name Type Description Default
dirname str | None

The absolute or relative system path to search from. Defaults to current working directory.

None
force_resolution bool

Whether to search recursively up the directory tree if the file is not found in the given directory, until the root is reached or a valid file is found. Defaults to True.

True

Returns:

Type Description
Path | None

The path to the project file if found, otherwise None.

Raises:

Type Description
FileNotFoundError

If the provided path does not exist.

FileExistsError

If multiple project configuration files are found in the same directory.

Source code in .venv/lib/python3.12/site-packages/plateforme/core/projects.py
def resolve_project_path(
    dirname: str | None = None, *, force_resolution: bool = True,
) -> Path | None:
    """Find the project configuration file path within the given directory.

    It searches for a valid ``config.toml`` or ``pyproject.toml`` file within
    the given directory. If no file is found in the provided directory and
    `force_resolution` is set to ``True``, it will recursively look up the
    directory tree until the root is reached or a valid file is found.
    Otherwise, it will return ``None``.

    Args:
        dirname: The absolute or relative system path to search from.
            Defaults to current working directory.
        force_resolution: Whether to search recursively up the directory tree
            if the file is not found in the given directory, until the root is
            reached or a valid file is found. Defaults to ``True``.

    Returns:
        The path to the project file if found, otherwise ``None``.

    Raises:
        FileNotFoundError: If the provided path does not exist.
        FileExistsError: If multiple project configuration files are found in
            the same directory.
    """
    lookup_path = Path(dirname).resolve() if dirname else Path.cwd()

    # Validate provided path
    if not lookup_path.is_dir():
        lookup_path = lookup_path.parent
    if not lookup_path.exists():
        raise FileNotFoundError(
            f"The provided path does not exist: {lookup_path}."
        )

    # Search for project file path
    while lookup_path != lookup_path.parent:
        project_paths = [lookup_path / filename for filename in PROJECT_FILES]
        project_paths = [path for path in project_paths if path.is_file()]
        if len(project_paths) > 1:
            raise FileExistsError(
                f"Multiple project configuration files found in the same "
                f"directory: {lookup_path}. Given: {PROJECT_FILES!r}."
            )
        elif len(project_paths) == 1:
            return project_paths[0]

        if not force_resolution:
            break
        lookup_path = lookup_path.parent

    return None
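
A minimal usage sketch of the path resolution on its own; the module path comes from the source header above:

```py
from plateforme.core.projects import resolve_project_path

# Returns the nearest config.toml/pyproject.toml as a Path, or None if the
# search exhausts the directory tree.
path = resolve_project_path()
if path is None:
    print('No project configuration file found.')
else:
    print(f'Project root: {path.parent}')
```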