"""``kedro.framework.project`` module provides utility to
configure a Kedro project and access its settings."""
from __future__ import annotations
import importlib
import importlib.util
import logging.config
import operator
import os
import traceback
import warnings
from collections import UserDict
from collections.abc import MutableMapping
from pathlib import Path
from typing import TYPE_CHECKING, Any
import dynaconf
import importlib_resources
import yaml
from dynaconf import LazySettings
from dynaconf.validator import ValidationError, Validator
from kedro.io import CatalogProtocol
from kedro.pipeline import Pipeline, pipeline
if TYPE_CHECKING:
import types
IMPORT_ERROR_MESSAGE = (
"An error occurred while importing the '{module}' module. Nothing "
"defined therein will be returned by 'find_pipelines'.\n\n{tb_exc}"
)
def _get_default_class(class_import_path: str) -> Any:
module, _, class_name = class_import_path.rpartition(".")
def validator_func(settings: dynaconf.base.Settings, validators: Any) -> Any:
return getattr(importlib.import_module(module), class_name)
return validator_func
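# The callable returned by ``_get_default_class`` defers the import until Dynaconf
# evaluates the validator's default, so merely importing this module does not pull
# in the default classes. A minimal sketch of the behaviour (names below are
# hypothetical placeholders, not executed here):
#
#     get_default = _get_default_class("kedro.framework.context.KedroContext")
#     default_cls = get_default(settings, validator)  # imports the module lazily
#     assert default_cls.__qualname__ == "KedroContext"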
class _IsSubclassValidator(Validator):
"""A validator to check if the supplied setting value is a subclass of the default class"""
def validate(
self, settings: dynaconf.base.Settings, *args: Any, **kwargs: Any
) -> None:
super().validate(settings, *args, **kwargs)
default_class = self.default(settings, self)
for name in self.names:
setting_value = getattr(settings, name)
if not issubclass(setting_value, default_class):
raise ValidationError(
f"Invalid value '{setting_value.__module__}.{setting_value.__qualname__}' "
f"received for setting '{name}'. It must be a subclass of "
f"'{default_class.__module__}.{default_class.__qualname__}'."
)
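# For example, a project's settings.py may override CONTEXT_CLASS; the override
# passes this validator only if it subclasses the default. A sketch, assuming a
# hypothetical project package ``my_project`` (not part of this module):
#
#     # my_project/settings.py
#     from kedro.framework.context import KedroContext
#
#     class ProjectContext(KedroContext):  # subclass of the default: accepted
#         ...
#
#     CONTEXT_CLASS = ProjectContext  # a non-subclass would raise ValidationError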
class _ImplementsCatalogProtocolValidator(Validator):
"""A validator to check if the supplied setting value is a subclass of the default class"""
def validate(
self, settings: dynaconf.base.Settings, *args: Any, **kwargs: Any
) -> None:
super().validate(settings, *args, **kwargs)
protocol = CatalogProtocol
for name in self.names:
setting_value = getattr(settings, name)
if not isinstance(setting_value(), protocol):
raise ValidationError(
f"Invalid value '{setting_value.__module__}.{setting_value.__qualname__}' "
f"received for setting '{name}'. It must implement "
f"'{protocol.__module__}.{protocol.__qualname__}'."
)
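# Note that the check instantiates the configured class with no arguments before
# testing it against ``CatalogProtocol``, so the class must be constructible that way.
# A sketch of a settings.py override, assuming a hypothetical ``my_project`` package:
#
#     # my_project/settings.py
#     from kedro.io import DataCatalog
#
#     class LoggingDataCatalog(DataCatalog):  # still satisfies CatalogProtocol
#         ...
#
#     DATA_CATALOG_CLASS = LoggingDataCatalog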
class _HasSharedParentClassValidator(Validator):
"""A validator to check that the parent of the default class is an ancestor of
the settings value."""
def validate(
self, settings: dynaconf.base.Settings, *args: Any, **kwargs: Any
) -> None:
super().validate(settings, *args, **kwargs)
default_class = self.default(settings, self)
for name in self.names:
setting_value = getattr(settings, name)
# In the case of ConfigLoader, default_class.mro() will be:
# [kedro.config.config.ConfigLoader,
# kedro.config.abstract_config.AbstractConfigLoader,
# abc.ABC,
# object]
# We pick out the direct parent and check if it's in any of the ancestors of
# the supplied setting_value. This assumes that the direct parent is
# the abstract class that must be inherited from.
# A more general check just for a shared ancestor would be:
# set(default_class.mro()) & set(setting_value.mro()) - {abc.ABC, object}
default_class_parent = default_class.mro()[1]
if default_class_parent not in setting_value.mro():
raise ValidationError(
f"Invalid value '{setting_value.__module__}.{setting_value.__qualname__}' "
f"received for setting '{name}'. It must be a subclass of "
f"'{default_class_parent.__module__}.{default_class_parent.__qualname__}'."
)
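# In practice this means CONFIG_LOADER_CLASS does not need to subclass the default
# loader itself, only its direct abstract parent. A sketch of a custom loader in a
# hypothetical project's settings.py:
#
#     # my_project/settings.py
#     from kedro.config import AbstractConfigLoader
#
#     class MyConfigLoader(AbstractConfigLoader):  # shares the abstract parent
#         def __getitem__(self, key): ...
#
#     CONFIG_LOADER_CLASS = MyConfigLoader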
class _ProjectSettings(LazySettings):
"""Define all settings available for users to configure in Kedro,
along with their validation rules and default values.
Use Dynaconf's LazySettings as base.
"""
_CONF_SOURCE = Validator("CONF_SOURCE", default="conf")
_HOOKS = Validator("HOOKS", default=tuple())
_CONTEXT_CLASS = _IsSubclassValidator(
"CONTEXT_CLASS",
default=_get_default_class("kedro.framework.context.KedroContext"),
)
_SESSION_STORE_CLASS = _IsSubclassValidator(
"SESSION_STORE_CLASS",
default=_get_default_class("kedro.framework.session.store.BaseSessionStore"),
)
_SESSION_STORE_ARGS = Validator("SESSION_STORE_ARGS", default={})
_DISABLE_HOOKS_FOR_PLUGINS = Validator("DISABLE_HOOKS_FOR_PLUGINS", default=tuple())
_CONFIG_LOADER_CLASS = _HasSharedParentClassValidator(
"CONFIG_LOADER_CLASS",
default=_get_default_class("kedro.config.OmegaConfigLoader"),
)
_CONFIG_LOADER_ARGS = Validator(
"CONFIG_LOADER_ARGS", default={"base_env": "base", "default_run_env": "local"}
)
_DATA_CATALOG_CLASS = _ImplementsCatalogProtocolValidator(
"DATA_CATALOG_CLASS",
default=_get_default_class("kedro.io.DataCatalog"),
)
def __init__(self, *args: Any, **kwargs: Any):
kwargs.update(
validators=[
self._CONF_SOURCE,
self._HOOKS,
self._CONTEXT_CLASS,
self._SESSION_STORE_CLASS,
self._SESSION_STORE_ARGS,
self._DISABLE_HOOKS_FOR_PLUGINS,
self._CONFIG_LOADER_CLASS,
self._CONFIG_LOADER_ARGS,
self._DATA_CATALOG_CLASS,
]
)
super().__init__(*args, **kwargs)
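# Each validator above corresponds to an attribute a project may declare in its
# settings.py; anything omitted falls back to the defaults defined here. A sketch of
# a settings.py overriding a few of them (the values shown are hypothetical project
# choices, not Kedro defaults):
#
#     # my_project/settings.py
#     CONF_SOURCE = "conf"
#     SESSION_STORE_ARGS = {"path": "./sessions"}
#     CONFIG_LOADER_ARGS = {"base_env": "base", "default_run_env": "local"}
#     DISABLE_HOOKS_FOR_PLUGINS = ("kedro-viz",)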
def _load_data_wrapper(func: Any) -> Any:
"""Wrap a method in _ProjectPipelines so that data is loaded on first access.
Taking inspiration from dynaconf.utils.functional.new_method_proxy
"""
def inner(self: Any, *args: Any, **kwargs: Any) -> Any:
self._load_data()
return func(self._content, *args, **kwargs)
return inner
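# The wrapper turns every dict-style method into "load on first touch": for the
# proxied ``operator.getitem`` below, ``pipelines["__default__"]`` is roughly
# equivalent to calling ``pipelines._load_data()`` followed by
# ``operator.getitem(pipelines._content, "__default__")``.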
class _ProjectPipelines(MutableMapping):
"""A read-only lazy dictionary-like object to hold the project pipelines.
When configured, it stores the pipelines module.
    On first data access, e.g. through __getitem__, it will load the registered pipelines.
    This object is initialized lazily for a few reasons:
    1. To support a unified way of importing via `from kedro.framework.project import pipelines`.
       The pipelines object is initialized lazily since the framework doesn't have knowledge about
       the project until `bootstrap_project` is run.
2. To speed up Kedro CLI performance. Loading the pipelines incurs overhead, as all related
modules need to be imported.
3. To ensure Kedro CLI remains functional when pipelines are broken. During development, broken
pipelines are common, but they shouldn't prevent other parts of Kedro CLI from functioning
properly (e.g. `kedro -h`).
"""
def __init__(self) -> None:
self._pipelines_module: str | None = None
self._is_data_loaded = False
self._content: dict[str, Pipeline] = {}
@staticmethod
def _get_pipelines_registry_callable(pipelines_module: str) -> Any:
module_obj = importlib.import_module(pipelines_module)
register_pipelines = getattr(module_obj, "register_pipelines")
return register_pipelines
def _load_data(self) -> None:
"""Lazily read pipelines defined in the pipelines registry module."""
        # Do nothing if the pipelines dictionary has not been configured with a
        # pipelines module or if the data has already been loaded.
if self._pipelines_module is None or self._is_data_loaded:
return
register_pipelines = self._get_pipelines_registry_callable(
self._pipelines_module
)
project_pipelines = register_pipelines()
self._content = project_pipelines
self._is_data_loaded = True
def configure(self, pipelines_module: str | None = None) -> None:
"""Configure the pipelines_module to load the pipelines dictionary.
Reset the data loading state so that after every ``configure`` call,
data are reloaded.
"""
self._pipelines_module = pipelines_module
self._is_data_loaded = False
self._content = {}
# Dict-like interface
__getitem__ = _load_data_wrapper(operator.getitem)
__setitem__ = _load_data_wrapper(operator.setitem)
__delitem__ = _load_data_wrapper(operator.delitem)
__iter__ = _load_data_wrapper(iter)
__len__ = _load_data_wrapper(len)
keys = _load_data_wrapper(operator.methodcaller("keys"))
values = _load_data_wrapper(operator.methodcaller("values"))
items = _load_data_wrapper(operator.methodcaller("items"))
# Presentation methods
__repr__ = _load_data_wrapper(repr)
__str__ = _load_data_wrapper(str)
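# A sketch of how the framework configures the module-level ``pipelines`` object
# defined further below: ``configure`` only records the registry module, and nothing
# is imported until the mapping is first read (the module name here is hypothetical):
#
#     pipelines.configure("my_project.pipeline_registry")  # no import happens yet
#     default = pipelines["__default__"]  # imports the module, calls register_pipelines()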
class _ProjectLogging(UserDict):
def __init__(self) -> None:
"""Initialise project logging. The path to logging configuration is given in
environment variable KEDRO_LOGGING_CONFIG (defaults to conf/logging.yml)."""
logger = logging.getLogger(__name__)
user_logging_path = os.environ.get("KEDRO_LOGGING_CONFIG")
project_logging_path = Path("conf/logging.yml")
default_logging_path = Path(
Path(__file__).parent / "rich_logging.yml"
if importlib.util.find_spec("rich")
else Path(__file__).parent / "default_logging.yml",
)
path: str | Path
msg = ""
if user_logging_path:
path = user_logging_path
elif project_logging_path.exists():
path = project_logging_path
msg = "You can change this by setting the KEDRO_LOGGING_CONFIG environment variable accordingly."
else:
            # Fall back to the framework's default logging configuration
path = default_logging_path
msg = f"Using '{path!s}' as logging configuration. " + msg
# Load and apply the logging configuration
logging_config = Path(path).read_text(encoding="utf-8")
self.configure(yaml.safe_load(logging_config))
logger.info(msg)
def configure(self, logging_config: dict[str, Any]) -> None:
"""Configure project logging using ``logging_config`` (e.g. from project
logging.yml). We store this in the UserDict data so that it can be reconfigured
in _bootstrap_subprocess.
"""
logging.config.dictConfig(logging_config)
self.data = logging_config
def set_project_logging(self, package_name: str) -> None:
"""Add the project level logging to the loggers upon provision of a package name.
Checks if project logger already exists to prevent overwriting, if none exists
it defaults to setting project logs at INFO level."""
if package_name not in self.data["loggers"]:
self.data["loggers"][package_name] = {"level": "INFO"}
self.configure(self.data)
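# A minimal sketch of a project-side ``conf/logging.yml`` that this class would load
# and pass to ``logging.config.dictConfig`` (keys follow the stdlib dictConfig schema;
# the handler and level choices here are assumptions, not Kedro defaults):
#
#     version: 1
#     disable_existing_loggers: False
#     handlers:
#       console:
#         class: logging.StreamHandler
#         level: INFO
#     loggers:
#       kedro:
#         level: INFO
#     root:
#       handlers: [console]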
PACKAGE_NAME = None
LOGGING = _ProjectLogging()
settings = _ProjectSettings()
pipelines = _ProjectPipelines()
def validate_settings() -> None:
"""Eagerly validate that the settings module is importable if it exists. This is desirable to
surface any syntax or import errors early. In particular, without eagerly importing
the settings module, dynaconf would silence any import error (e.g. missing
dependency, missing/mislabelled pipeline), and users would instead get a cryptic
error message ``Expected an instance of `ConfigLoader`, got `NoneType` instead``.
More info on the dynaconf issue: https://github.com/dynaconf/dynaconf/issues/460
"""
if PACKAGE_NAME is None:
raise ValueError(
"Package name not found. Make sure you have configured the project using "
"'bootstrap_project'. This should happen automatically if you are using "
"Kedro command line interface."
)
    # Check whether the settings module exists; if it does, validate it by importing it.
if importlib.util.find_spec(f"{PACKAGE_NAME}.settings") is not None:
importlib.import_module(f"{PACKAGE_NAME}.settings")
else:
logger = logging.getLogger(__name__)
logger.warning("No 'settings.py' found, defaults will be used.")
def _create_pipeline(pipeline_module: types.ModuleType) -> Pipeline | None:
if not hasattr(pipeline_module, "create_pipeline"):
warnings.warn(
f"The '{pipeline_module.__name__}' module does not "
f"expose a 'create_pipeline' function, so no pipelines "
f"defined therein will be returned by 'find_pipelines'."
)
return None
obj = getattr(pipeline_module, "create_pipeline")()
if not isinstance(obj, Pipeline):
warnings.warn(
f"Expected the 'create_pipeline' function in the "
f"'{pipeline_module.__name__}' module to return a "
f"'Pipeline' object, got '{type(obj).__name__}' "
f"instead. Nothing defined therein will be returned by "
f"'find_pipelines'."
)
return None
return obj
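# ``_create_pipeline`` expects each discovered module to expose a ``create_pipeline``
# function returning a ``Pipeline``. A sketch of such a module in a hypothetical
# project (e.g. my_project/pipelines/data_processing/__init__.py):
#
#     from kedro.pipeline import Pipeline, node, pipeline
#
#     def create_pipeline(**kwargs) -> Pipeline:
#         return pipeline([node(lambda x: x, inputs="raw", outputs="clean", name="copy")])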
def find_pipelines(raise_errors: bool = False) -> dict[str, Pipeline]: # noqa: PLR0912
"""Automatically find modular pipelines having a ``create_pipeline``
function. By default, projects created using Kedro 0.18.3 and higher
call this function to autoregister pipelines upon creation/addition.
Projects that require more fine-grained control can still define the
pipeline registry without calling this function. Alternatively, they
can modify the mapping generated by the ``find_pipelines`` function.
For more information on the pipeline registry and autodiscovery, see
https://docs.kedro.org/en/stable/nodes_and_pipelines/pipeline_registry.html
Args:
raise_errors: If ``True``, raise an error upon failed discovery.
Returns:
A generated mapping from pipeline names to ``Pipeline`` objects.
Raises:
ImportError: When a module does not expose a ``create_pipeline``
function, the ``create_pipeline`` function does not return a
``Pipeline`` object, or if the module import fails up front.
If ``raise_errors`` is ``False``, see Warns section instead.
Warns:
UserWarning: When a module does not expose a ``create_pipeline``
function, the ``create_pipeline`` function does not return a
``Pipeline`` object, or if the module import fails up front.
If ``raise_errors`` is ``True``, see Raises section instead.
"""
pipeline_obj = None
# Handle the simplified project structure found in several starters.
pipeline_module_name = f"{PACKAGE_NAME}.pipeline"
try:
pipeline_module = importlib.import_module(pipeline_module_name)
except Exception as exc:
if str(exc) != f"No module named '{pipeline_module_name}'":
if raise_errors:
raise ImportError(
f"An error occurred while importing the "
f"'{pipeline_module_name}' module."
) from exc
warnings.warn(
IMPORT_ERROR_MESSAGE.format(
module=pipeline_module_name, tb_exc=traceback.format_exc()
)
)
else:
pipeline_obj = _create_pipeline(pipeline_module)
pipelines_dict = {"__default__": pipeline_obj or pipeline([])}
# Handle the case that a project doesn't have a pipelines directory.
try:
pipelines_package = importlib_resources.files(f"{PACKAGE_NAME}.pipelines")
except ModuleNotFoundError as exc:
if str(exc) == f"No module named '{PACKAGE_NAME}.pipelines'":
return pipelines_dict
for pipeline_dir in pipelines_package.iterdir():
if not pipeline_dir.is_dir():
continue
pipeline_name = pipeline_dir.name
if pipeline_name == "__pycache__":
continue
# Prevent imports of hidden directories/files
if pipeline_name.startswith("."):
continue
pipeline_module_name = f"{PACKAGE_NAME}.pipelines.{pipeline_name}"
try:
pipeline_module = importlib.import_module(pipeline_module_name)
except Exception as exc:
if raise_errors:
raise ImportError(
f"An error occurred while importing the "
f"'{pipeline_module_name}' module."
) from exc
warnings.warn(
IMPORT_ERROR_MESSAGE.format(
module=pipeline_module_name, tb_exc=traceback.format_exc()
)
)
continue
pipeline_obj = _create_pipeline(pipeline_module)
if pipeline_obj is not None:
pipelines_dict[pipeline_name] = pipeline_obj
return pipelines_dict
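# A typical project-side use of ``find_pipelines`` in a hypothetical
# my_project/pipeline_registry.py, mirroring the autoregistration described in the
# docstring above (combining discovered pipelines into "__default__" is a common
# convention, shown here as a sketch):
#
#     from kedro.framework.project import find_pipelines
#     from kedro.pipeline import Pipeline
#
#     def register_pipelines() -> dict[str, Pipeline]:
#         pipelines = find_pipelines()
#         pipelines["__default__"] = sum(pipelines.values())
#         return pipelines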