Skip to content

Commit

Permalink
Add basic macro interfaces and path handling
Browse files Browse the repository at this point in the history
  • Loading branch information
bswck committed May 15, 2024
1 parent 5685468 commit 40bdf24
Show file tree
Hide file tree
Showing 6 changed files with 232 additions and 92 deletions.
95 changes: 53 additions & 42 deletions configzen/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@
from __future__ import annotations

from collections.abc import Callable, Iterable, Mapping
from contextlib import suppress
from contextvars import ContextVar
from dataclasses import dataclass
from functools import wraps
from typing import TYPE_CHECKING, Any, ClassVar, cast
from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, cast

from anyio.to_thread import run_sync
from pydantic import BaseModel, PrivateAttr
Expand All @@ -15,9 +15,9 @@
from pydantic_settings import BaseSettings
from pydantic_settings.main import SettingsConfigDict

from configzen.context import copy_context_on_await, copy_context_on_call
from configzen.context import isolated_context_coroutine, isolated_context_function
from configzen.data import roundtrip_update_mapping
from configzen.processor import ConfigProcessor
from configzen.processor import ConfigProcessor, FileSystemAwareConfigProcessor
from configzen.routes import (
EMPTY_ROUTE,
GetAttr,
Expand Down Expand Up @@ -51,10 +51,20 @@ class ModelConfig(SettingsConfigDict, total=False):


pydantic_config_keys |= set(ModelConfig.__annotations__)
loading: ContextVar[bool] = ContextVar("loading", default=False)
processing: ContextVar[ProcessingContext | None] = ContextVar(
"processing",
default=None,
)
owner_lookup: ContextVar[BaseConfig] = ContextVar("owner")


class ProcessingContext(NamedTuple):
    """State of an in-progress configuration load, shared via the `processing` ContextVar."""

    # The config model class that initiated the load (set to `cls` in `config_load`).
    model_class: type[BaseConfig]
    # The processor holding the parsed configuration data being processed.
    processor: ConfigProcessor
    # We keep it mainly to make path resolution smarter.
    # NOTE(review): seeded with the initial ConfigSource at load time
    # (trace=[config_source]); presumably grows as nested sources are
    # visited — confirm against the processor implementation.
    trace: list[ConfigSource]


def _locate(
owner: object,
value: object,
Expand Down Expand Up @@ -150,7 +160,7 @@ def __init__(self, **data: Any) -> None:
owner = owner_lookup.get()
except LookupError:
owner = None
if loading.get():
if processing.get():
owner_lookup.set(self)
super().__init__(**data)
self._config_root = owner
Expand Down Expand Up @@ -192,7 +202,7 @@ def config_processor(self) -> ConfigProcessor:
"""
if self._config_root is None:
if not hasattr(self, "_config_processor"):
return ConfigProcessor(self.config_dump())
return FileSystemAwareConfigProcessor(self.config_dump())
return self._config_processor
return self._config_root.config_processor

Expand Down Expand Up @@ -251,11 +261,29 @@ def _validate_processor_factory(
"Callable[..., ConfigProcessor] | None",
cls.model_config.get("config_processor_factory"),
)
or ConfigProcessor
or FileSystemAwareConfigProcessor
)

@classmethod
@copy_context_on_call
def _try_rebuild_model(cls) -> None:
    """Best-effort `model_rebuild()` before loading, to resolve forward references.

    `_parent_namespace_depth=5` must match the call-stack depth between the
    frame that holds the user's namespace and the `model_rebuild()` call;
    the two possible stacks are traced below. Failures are deliberately
    swallowed: rebuilding is an opportunistic convenience, not a requirement.
    """
    # Possible scenarios:
    # (sync) Frame 1: <class>.config_load()
    # (sync) Frame 2: isolated_context_function.<locals>.copy()
    # (sync) Frame 3: run_isolated()
    # (sync) Frame 4: <class>.config_load()
    # (sync) Frame 5: <class>.model_rebuild()
    #
    # (async) Frame 1: <class>.config_load_async()
    # (async) Frame 2: isolated_context_function.<locals>.copy()
    # (async) Frame 3: run_isolated()
    # (async) Frame 4: <class>.config_load()
    # (async) Frame 5: <class>.model_rebuild()
    if cls.model_config["rebuild_on_load"]:
        with suppress(Exception):
            cls.model_rebuild(_parent_namespace_depth=5)

@classmethod
@isolated_context_function
def config_load(
cls,
source: object | None = None,
Expand All @@ -276,7 +304,7 @@ def config_load(
that case just create `BinaryFileConfigSource("plist_file.plist")`.
context
The context to use during model validation.
See also https://docs.pydantic.dev/latest/api/base_model @ `model_validate`.
See also [`model_validate`][pydantic.BaseModel.model_validate].
processor_factory
The state factory to use to parse the newly loaded configuration data.
Expand All @@ -285,12 +313,7 @@ def config_load(
self
"""
if cls.model_config["rebuild_on_load"]:
# Frame 1: copy_context_and_call.<locals>.copy()
# Frame 2: copy_and_run()
# Frame 3: <class>.config_load()
# Frame 4: <class>.model_rebuild()
cls.model_rebuild(_parent_namespace_depth=4)
cls._try_rebuild_model()

# Validate the source we load our configuration from.
config_source = cls._validate_config_source(source)
Expand All @@ -308,7 +331,7 @@ def config_load(

# ruff: noqa: FBT003
try:
loading.set(True)
processing.set(ProcessingContext(cls, processor, trace=[config_source]))

# Processing will execute any commands that are present
# in the configuration data and return the final configuration
Expand All @@ -319,16 +342,15 @@ def config_load(
# is saved (`processor.revert_processor_changes()`).
self = cls(**processor.get_processed_data())
finally:
loading.set(False)
processing.set(None)

# Quick setup and we're done.
self._config_source = config_source
self._config_processor = processor
return self

@classmethod
@copy_context_on_await
@wraps(config_load)
@isolated_context_coroutine
async def config_load_async(
cls,
source: object | None = None,
Expand All @@ -355,28 +377,22 @@ async def config_load_async(
self
"""
# Intentionally not using `run_sync(config_load)` here.
# We want to keep every user-end object handled by the same thread.
cls._try_rebuild_model()

if cls.model_config["rebuild_on_load"]:
# Frame 1: copy_context_on_await.<locals>.copy_async()
# Frame 2: copy_and_await()
# Frame 3: <class>.config_load_async()
# Frame 4: <class>.model_rebuild()
cls.model_rebuild(_parent_namespace_depth=4)
# Intentionally not using `run_sync(config_load)` here.
# We want to keep make the set up instructions blocking to avoid running
# into mutexes.

config_source = cls._validate_config_source(source)
make_processor = cls._validate_processor_factory(processor_factory)
processor = make_processor(await config_source.load_async())

try:
loading.set(True)
processing.set(ProcessingContext(cls, processor, trace=[config_source]))

# Since `processor.get_processed_data()` operates on primitive data types,
# we can safely use run_sync here to run in a worker thread.
self = cls(**await run_sync(processor.get_processed_data))
finally:
loading.set(False)
processing.set(None)

self._config_processor = processor
self._config_source = config_source
Expand Down Expand Up @@ -439,11 +455,6 @@ async def config_reload_async(self) -> Self:

return self

@wraps(config_reload_async)
async def reload_async(self) -> Self:
"""Do the same as `config_reload_async`."""
return await self.config_reload_async()

def _config_data_save(
self,
destination: object | None = None,
Expand All @@ -464,7 +475,7 @@ def _config_data_save(
new_data = self.config_dump()
else:
# Construct a new configuration instance.
# Respect __class__ attribute since root might be a proxy (from proxyvars).
# Respect `__class__` attribute: root might be a proxy, e.g. from proxyvars.
new_root = root.__class__(**processor.get_processed_data())
routes = root.config_find_routes(self)

Expand Down Expand Up @@ -499,8 +510,8 @@ def config_save(self, destination: object | None = None) -> Self:
that case just create `BinaryFileConfigSource("plist_file.plist")`.
"""
config_source, data = self._config_data_save(destination)
config_source.dump(data)
config_destination, data = self._config_data_save(destination)
config_destination.dump(data)
return self

async def config_save_async(self, destination: object | None = None) -> Self:
Expand All @@ -518,8 +529,8 @@ async def config_save_async(self, destination: object | None = None) -> Self:
that case just create `BinaryFileConfigSource("plist_file.plist")`.
"""
config_source, data = self._config_data_save(destination)
await config_source.dump_async(data)
config_destination, data = self._config_data_save(destination)
await config_destination.dump_async(data)
return self

def config_at(self, *routes: RouteLike) -> Item:
Expand Down
24 changes: 12 additions & 12 deletions configzen/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,14 @@


__all__ = (
"copy_context_on_call",
"copy_context_on_await",
"copy_and_run",
"copy_and_await",
"isolated_context_function",
"isolated_context_coroutine",
"run_isolated",
"async_run_isolated",
)


def copy_context_on_call(func: Callable[_P, _T]) -> Callable[_P, _T]:
def isolated_context_function(func: Callable[_P, _T]) -> Callable[_P, _T]:
"""
Copy the context automatically on function call.
Expand All @@ -34,16 +34,16 @@ def copy_context_on_call(func: Callable[_P, _T]) -> Callable[_P, _T]:
Used as a decorator.
"""
if isinstance(func, (classmethod, staticmethod)):
return type(func)(copy_context_on_call(func.__func__))
return type(func)(isolated_context_function(func.__func__))

@wraps(func)
def copy(*args: _P.args, **kwargs: _P.kwargs) -> _T:
return copy_and_run(func, *args, **kwargs)
return run_isolated(func, *args, **kwargs)

return copy


def copy_context_on_await(
def isolated_context_coroutine(
func: Callable[_P, Coroutine[object, object, _T]],
) -> Callable[_P, Coroutine[object, object, _T]]:
"""
Expand All @@ -54,22 +54,22 @@ def copy_context_on_await(
Used as a decorator.
"""
if isinstance(func, (classmethod, staticmethod)):
return type(func)(copy_context_on_await(func.__func__))
return type(func)(isolated_context_coroutine(func.__func__))

@wraps(func)
async def copy_async(*args: _P.args, **kwargs: _P.kwargs) -> _T:
return await copy_and_await(func, *args, **kwargs)
return await async_run_isolated(func, *args, **kwargs)

return copy_async


def copy_and_run(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T:
def run_isolated(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T:
    """Call *func* with the given arguments inside a copy of the current context.

    Context-variable writes made by *func* stay confined to the copy and do
    not leak into the caller's context.
    """
    return contextvars.copy_context().run(func, *args, **kwargs)


def copy_and_await(
def async_run_isolated(
func: Callable[_P, Coroutine[object, object, _T]],
*args: _P.args,
**kwargs: _P.kwargs,
Expand Down
Loading

0 comments on commit 40bdf24

Please sign in to comment.