diff --git a/src/omnipy/__init__.py b/src/omnipy/__init__.py
index 5013e53e..36431041 100644
--- a/src/omnipy/__init__.py
+++ b/src/omnipy/__init__.py
@@ -137,6 +137,7 @@ from omnipy.modules.tables.tasks import (remove_columns,
                                          rename_col_names,
                                          transpose_columns_with_data_files)
+from omnipy.util.contexts import print_exception
 
 # if typing.TYPE_CHECKING:
 
@@ -288,5 +289,6 @@
     'union_all',
     'remove_columns',
     'rename_col_names',
-    'transpose_columns_with_data_files'
+    'transpose_columns_with_data_files',
+    'print_exception',
 ]
diff --git a/src/omnipy/compute/mixins/serialize.py b/src/omnipy/compute/mixins/serialize.py
index f9ec77e0..43158bdf 100644
--- a/src/omnipy/compute/mixins/serialize.py
+++ b/src/omnipy/compute/mixins/serialize.py
@@ -199,7 +199,7 @@ def _generate_datetime_str(self):
 def _all_job_output_file_paths_in_reverse_order_for_last_run(
         persist_data_dir_path: Path, job_name: str) -> Generator[Path, None, None]:
-    sorted_date_dirs = iter(sorted(os.listdir(persist_data_dir_path)))
+    sorted_date_dirs = iter(reversed(sorted(os.listdir(persist_data_dir_path))))
 
     try:
         last_dir = next(sorted_date_dirs)
diff --git a/src/omnipy/data/dataset.py b/src/omnipy/data/dataset.py
index 311eb6f6..3230245f 100644
--- a/src/omnipy/data/dataset.py
+++ b/src/omnipy/data/dataset.py
@@ -12,10 +12,8 @@
                     get_args,
                     get_origin,
                     Iterator,
-                    Optional,
                     Type,
-                    TypeAlias,
-                    TypeVar)
+                    TypeAlias)
 from urllib.parse import ParseResult, urlparse
 
 import humanize
@@ -26,6 +24,7 @@
 from pydantic.generics import GenericModel
 from pydantic.main import ModelMetaclass
 from pydantic.utils import lenient_isinstance, lenient_issubclass
+from typing_extensions import TypeVar
 
 from omnipy.data.data_class_creator import DataClassBase, DataClassBaseMeta
 from omnipy.data.model import (_cleanup_name_qualname_and_module,
@@ -39,15 +38,13 @@
 from omnipy.util.helpers import (get_calling_module_name,
                                  get_default_if_typevar,
                                  is_iterable,
-                                 is_optional,
-                                 is_strict_subclass,
                                  is_union,
                                  remove_annotated_plus_optional_if_present,
                                  remove_forward_ref_notation)
 from omnipy.util.tabulate import tabulate
 from omnipy.util.web import download_file_to_memory
 
-ModelT = TypeVar('ModelT', bound=Model)
+ModelT = TypeVar('ModelT', bound=Model, default=Model[object])
 _DatasetT = TypeVar('_DatasetT')
 
 DATA_KEY = 'data'
@@ -113,45 +110,50 @@ class MyDataset(Dataset[MyToplevelDict]):
     """
     class Config:
         validate_assignment = True
+
+        # TODO: Use json serializer package from the pydantic config instead of 'json'
+
         # json_loads = orjson.loads
         # json_dumps = orjson_dumps
 
     data: dict[str, ModelT] = Field(default={})
 
-    def __class_getitem__(cls, model: ModelT) -> ModelT:
+    def __class_getitem__(
+        cls,
+        params: type[ModelT] | tuple[type[ModelT]] | tuple[type[ModelT], Any] | TypeVar
+        | tuple[TypeVar, ...],
+    ) -> 'type[Dataset[type[ModelT]]]':
         # TODO: change model type to params: Type[Any] | tuple[Type[Any], ...]
         #       as in GenericModel.
 
-        # For now, only singular model types are allowed. These lines are needed for
-        # interoperability with pydantic GenericModel, which internally stores the model
-        # as a tuple:
-        if isinstance(model, tuple) and len(model) == 1:
-            model = model[0]
+        # These lines are needed for interoperability with pydantic GenericModel, which
+        # internally stores the model as a length-1 tuple:
+        model = params[0] if isinstance(params, tuple) and len(params) == 1 else params
 
         orig_model = model
 
-        model = cls._origmodel_if_annotated_optional(model)
-        args = get_args(model)
+        if cls == Dataset:
+            # model = cls._origmodel_if_annotated_optional(model)
 
-        if is_union(model) and len(args) == 2 and lenient_issubclass(args[1], DataWithParams):
-            model_to_check = args[0]
-        else:
-            model_to_check = model
+            args = get_args(model)
 
-        if not isinstance(model_to_check, TypeVar) \
-                and not lenient_issubclass(model_to_check, Model) \
-                and not is_strict_subclass(cls, Dataset):
-            raise TypeError('Invalid model: {}! '.format(model_to_check)
-                            + 'omnipy Dataset models must be a specialization of the omnipy '
-                            'Model class.')
+            if is_union(model) and len(args) == 2 and lenient_issubclass(args[1], DataWithParams):
+                model_to_check = args[0]
+            else:
+                model_to_check = model
 
-        if cls == Dataset and not is_optional(model):  # TODO: Handle MultiModelDataset??
-            model = Annotated[Optional[model], 'Fake Optional from Dataset']
+            if not isinstance(model_to_check, TypeVar) \
+                    and not lenient_issubclass(model_to_check, Model):
+                raise TypeError('Invalid model: {}! '.format(model_to_check)
+                                + 'omnipy Dataset models must be a specialization of the omnipy '
+                                'Model class.')
 
-        if isinstance(model, TypeVar):
-            model = get_default_if_typevar(model)
+            created_dataset = super().__class_getitem__(model)
+        else:
+            if isinstance(model, TypeVar):
+                params = get_default_if_typevar(model)
 
-        created_dataset = super().__class_getitem__(model)
+            created_dataset = super().__class_getitem__(params)
 
         _cleanup_name_qualname_and_module(cls, created_dataset, orig_model)
@@ -185,7 +187,7 @@ def __init__(  # noqa: C901
         if value != Undefined:
             assert data == Undefined, \
                 'Not allowed to combine positional and "data" keyword argument'
-            assert len(kwargs) == 0 or self.get_model_class().is_param_model(), \
+            assert len(kwargs) == 0, \
                 'Not allowed to combine positional and keyword arguments'
             super_kwargs[DATA_KEY] = value
@@ -194,18 +196,18 @@ def __init__(  # noqa: C901
                 "Not allowed to combine 'data' with other keyword arguments"
             super_kwargs[DATA_KEY] = data
 
-        model_cls = self.get_model_class()
+        # model_cls = self.get_model_class()
         if kwargs:
             if DATA_KEY not in super_kwargs:
-                assert isinstance(model_cls, TypeVar) or not model_cls.is_param_model(), \
-                    ('If any keyword arguments are defined, parametrized datasets require at least '
-                     'one positional argument in the __init__ method (typically providing the data '
-                     'in the form of a dict from name to content for each data file).')
-
+                # assert isinstance(model_cls, TypeVar) or not model_cls.is_param_model(), \
+                #     ('If any keyword arguments are defined, parametrized datasets require at least '
+                #      'one positional argument in the __init__ method (typically providing the data '
+                #      'in the form of a dict from name to content for each data file).')
+                #
                 super_kwargs[DATA_KEY] = kwargs
                 kwargs = {}
 
-        if model_cls == ModelT:
+        if self.get_model_class() == ModelT:
            self._raise_no_model_exception()
 
         dataset_as_input = DATA_KEY in super_kwargs \
@@ -216,10 +218,10 @@ def __init__(  # noqa: C901
         self._init(super_kwargs, **kwargs)
 
         try:
-            GenericModel.__init__(self, **super_kwargs)
+            super().__init__(**super_kwargs)
         except ValidationError:
             if dataset_as_input:
-                GenericModel.__init__(self)
+                super().__init__()
                 self.from_data(super_kwargs[DATA_KEY])
             else:
                 raise
@@ -258,7 +260,8 @@ def get_model_class(cls) -> Type[Model]:
         :return: The concrete Model class used for all data files in the dataset
         """
         model_type = cls._get_data_field().type_
-        return cls._origmodel_if_annotated_optional(model_type)
+        # return cls._origmodel_if_annotated_optional(model_type)
+        return model_type
 
     @classmethod
     def _origmodel_if_annotated_optional(cls, model):
@@ -268,24 +271,18 @@ def _origmodel_if_annotated_optional(cls, model):
             model = get_args(model)[0]
         return model
 
-    # TODO: Update _raise_no_model_exception() text. Model is now a requirement
     @staticmethod
     def _raise_no_model_exception() -> None:
         raise TypeError(
-            'Note: The Dataset class requires a concrete model to be specified as '
+            'Note: The Dataset class requires a Model class (or a subclass) to be specified as '
             'a type hierarchy within brackets either directly, e.g.:\n\n'
-            '\tmodel = Dataset[list[int]]()\n\n'
+            '\tmodel = Dataset[Model[list[int]]]()\n\n'
             'or indirectly in a subclass definition, e.g.:\n\n'
-            '\tclass MyNumberListDataset(Dataset[list[int]]): ...\n\n'
-            'In both cases, the use of the Model class or a subclass is encouraged if anything '
-            'other than the simplest cases, e.g.:\n\n'
+            '\tclass MyNumberListDataset(Dataset[Model[list[int]]]): ...\n\n'
+            'For anything other than the simplest cases, the definition of Model and Dataset '
+            'subclasses is encouraged, e.g.:\n\n'
             '\tclass MyNumberListModel(Model[list[int]]): ...\n'
-            '\tclass MyDataset(Dataset[MyNumberListModel]): ...\n\n'
-            'Usage of Dataset without a type specification results in this exception. '
-            'Similar use of the Model class do not currently result in an exception, only '
-            'a warning message the first time this is done. However, this is just a '
-            '"poor man\'s exception" due to complex technicalities in that class. Please '
-            'explicitly specify types in both cases. ')
+            '\tclass MyDataset(Dataset[MyNumberListModel]): ...\n\n')
 
     def _set_standard_field_description(self) -> None:
         self.__fields__[DATA_KEY].field_info.description = self._get_standard_field_description()
@@ -587,7 +584,7 @@ def _table_repr(self) -> str:
             ((i,
               k,
               type(v).__name__,
-              v.__len__() if hasattr(v, '__len__') else 'N/A',
+              len(v) if hasattr(v, '__len__') else 'N/A',
               humanize.naturalsize(objsize.get_deep_size(v)))
              for i, (k, v) in enumerate(self.items())),
             ('#', 'Data file name', 'Type', 'Length', 'Size (in memory)'),
@@ -597,10 +594,10 @@ def _table_repr(self) -> str:
 
     return ret
 
 
-# TODO: Use json serializer package from the pydantic config instead of 'json'
+ModelNewT = TypeVar('ModelNewT', bound=Model, default=Model[object])
 
 
-class MultiModelDataset(Dataset[ModelT], Generic[ModelT]):
+class MultiModelDataset(Dataset[ModelNewT], Generic[ModelNewT]):
     """
     Variant of Dataset that allows custom models to be set on individual data files
 
@@ -608,9 +605,9 @@
     custom models.
""" - _custom_field_models: dict[str, ModelT] = PrivateAttr(default={}) + _custom_field_models: dict[str, ModelNewT] = PrivateAttr(default={}) - def set_model(self, data_file: str, model: ModelT) -> None: + def set_model(self, data_file: str, model: ModelNewT) -> None: try: self._custom_field_models[data_file] = model if data_file in self.data: @@ -621,7 +618,7 @@ def set_model(self, data_file: str, model: ModelT) -> None: del self._custom_field_models[data_file] raise - def get_model(self, data_file: str) -> ModelT: + def get_model(self, data_file: str) -> ModelNewT: if data_file in self._custom_field_models: return self._custom_field_models[data_file] else: @@ -635,7 +632,7 @@ def _validate(self, data_file: str) -> None: data_obj = self._to_data_if_model(self.data[data_file]) parsed_data = self._to_data_if_model(model(data_obj)) self.data[data_file] = parsed_data - super()._validate(data_file) # validates all data according to ModelT + super()._validate(data_file) # validates all data according to ModelNewT @staticmethod def _to_data_if_model(data_obj: Any): @@ -644,9 +641,9 @@ def _to_data_if_model(data_obj: Any): return data_obj -_KwargValT = TypeVar('_KwargValT', bound=object) -_ParamModelT = TypeVar('_ParamModelT', bound=ParamModel) -_ListOfParamModelT = TypeVar('_ListOfParamModelT', bound=ListOfParamModel) +_KwargValT = TypeVar('_KwargValT', bound=object, default=object) +_ParamModelT = TypeVar('_ParamModelT', bound=ParamModel, default=ParamModel) +_ListOfParamModelT = TypeVar('_ListOfParamModelT', bound=ListOfParamModel, default=ListOfParamModel) ParamModelSuperKwargsType: TypeAlias = \ dict[str, dict[str, _ParamModelT | DataWithParams[_ParamModelT, _KwargValT]]] diff --git a/src/omnipy/data/helpers.py b/src/omnipy/data/helpers.py index 79a66691..84aa4ce8 100644 --- a/src/omnipy/data/helpers.py +++ b/src/omnipy/data/helpers.py @@ -11,6 +11,22 @@ def __init__(self, t: T) -> None: raise ValueError() +class TypeVarStore1(TypeVarStore[T], Generic[T]): + ... + + +class TypeVarStore2(TypeVarStore[T], Generic[T]): + ... + + +class TypeVarStore3(TypeVarStore[T], Generic[T]): + ... + + +class TypeVarStore4(TypeVarStore[T], Generic[T]): + ... + + class YesNoMaybe(IntEnum): NO = 0 YES = 1 @@ -26,7 +42,7 @@ class MethodInfo(NamedTuple): # (https://docs.python.org/3.10/reference/datamodel.html) _SPECIAL_METHODS_INFO_DICT: dict[str, MethodInfo] = { # 3.3.1. Basic customization ############################################ - '__bool__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO), + # '__bool__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO), # 3.3.7. 
Emulating container types ###################################### '__len__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO), '__length_hint__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO), @@ -98,6 +114,8 @@ class MethodInfo(NamedTuple): '__trunc__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.MAYBE), '__floor__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.MAYBE), '__ceil__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.MAYBE), + # - Hash and other standard methods ---------------------------------- + '__hash__': MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO), } diff --git a/src/omnipy/data/model.py b/src/omnipy/data/model.py index 0fffcbd4..14734c57 100644 --- a/src/omnipy/data/model.py +++ b/src/omnipy/data/model.py @@ -1,5 +1,5 @@ from collections import defaultdict -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Callable, Iterable, Mapping, Sequence from contextlib import contextmanager, suppress import functools import inspect @@ -10,7 +10,6 @@ from types import GenericAlias, ModuleType, NoneType, UnionType from typing import (Annotated, Any, - Callable, cast, ContextManager, ForwardRef, @@ -74,6 +73,9 @@ _RootT = TypeVar('_RootT', bound=object | None, default=object) _ModelT = TypeVar('_ModelT') +_ParamRootT = TypeVar('_ParamRootT', default=object | None) +_KwargValT = TypeVar('_KwargValT', default=object) + ROOT_KEY = '__root__' # TODO: Refactor Dataset and Model using mixins (including below functions) @@ -246,9 +248,9 @@ def _get_default_value_from_model(cls, model: type[_RootT] | TypeForm | TypeVar) origin_type = get_origin(model) args = get_args(model) - if origin_type is Annotated: - model = remove_annotated_plus_optional_if_present(model) - return cls._get_default_value_from_model(model) + # if origin_type is Annotated: + # model = remove_annotated_plus_optional_if_present(model) + # return cls._get_default_value_from_model(model) if origin_type in (None, ()): origin_type = model @@ -273,6 +275,9 @@ def _get_default_value_from_model(cls, model: type[_RootT] | TypeForm | TypeVar) if origin_type is Literal: return args[0] + if origin_type is Callable: + return cast(_RootT, lambda: None) + if origin_type is ForwardRef or type(origin_type) is ForwardRef: raise TypeError(f'Cannot instantiate model "{model}". 
') @@ -343,11 +348,10 @@ def _prepare_cls_members_to_mimic_model(cls, created_model: 'Model[type[_RootT]] names_to_check = (name, '__add__') if name in ('__iadd__', '__radd__') else (name,) for type_to_support in outer_types: for name_to_check in names_to_check: - if hasattr(type_to_support, name_to_check): - setattr(created_model, - name, - functools.partialmethod(cls._special_method, name, method_info)) - break + setattr(created_model, + name, + functools.partialmethod(cls._special_method, name, method_info)) + break else: continue # To let the inner break, also break the outer for loop @@ -539,9 +543,9 @@ def __del__(self): self.snapshot_holder.schedule_deepcopy_content_ids_for_deletion(contents_id) @classmethod - def clone_model_cls(cls: type[_ModelT], model_name: str) -> type[_ModelT]: - new_model: type[_ModelT] = type(model_name, (cls,), {}) - return new_model + def clone_model_cls(cls: type[_ModelT], new_model_cls_name: str) -> type[_ModelT]: + new_model_cls: type[_ModelT] = type(new_model_cls_name, (cls,), {}) + return new_model_cls @staticmethod def _raise_no_model_exception() -> None: @@ -588,10 +592,51 @@ def temporary_set_value_iter_to_pydantic_method() -> Iterator[None]: @classmethod def update_forward_refs(cls, **localns: Any) -> None: - """ - Try to update ForwardRefs on fields based on this Model, globalns and localns. - """ - super().update_forward_refs(**localns) + def _identify_all_forward_refs_in_model_field(field: ModelField, + all_forward_refs: list[ForwardRef] = []): + if field: + if field.outer_type_.__class__ == ForwardRef: + all_forward_refs.append(field.outer_type_) + if field.type_.__class__ == ForwardRef: + all_forward_refs.append(field.type_) + if field.sub_fields: + for sub_f in field.sub_fields: + _identify_all_forward_refs_in_model_field(sub_f, all_forward_refs) + return all_forward_refs + + import sys + if cls.__module__ in sys.modules: + globalns = sys.modules[cls.__module__].__dict__.copy() + else: + globalns = {} + + root_field = cls._get_root_field() + new_localns = localns.copy() + for forward_ref in _identify_all_forward_refs_in_model_field(root_field): + type_name = forward_ref.__forward_arg__ + typ_ = localns.get(type_name) + if not typ_: + typ_ = globalns.get(type_name) + if typ_: + new_localns[type_name] = cls._wrap_with_annotated_optional(typ_) + + super().update_forward_refs(**new_localns) + + # cls._add_annotated_optional_hack_to_model(cls) + # super().update_forward_refs(**localns) + cls._remove_annotated_optional_hack_from_model(cls, recursive=True) + + root_field = cls._get_root_field() + if root_field: + assert root_field.allow_none + + # if root_field.sub_fields and not (is_union(root_field.outer_type_) or get_origin(root_field.outer_type_) in [list, dict]): + if root_field.sub_fields and not (get_origin(root_field.outer_type_) in [list, dict]): + # if root_field.sub_fields: + for sub_field in root_field.sub_fields: + if sub_field.type_.__class__ is not ForwardRef: + ... 
+
         cls.__name__ = remove_forward_ref_notation(cls.__name__)
         cls.__qualname__ = remove_forward_ref_notation(cls.__qualname__)
@@ -800,8 +845,8 @@ def _parse_with_root_type_if_model(cls,
                                        value: _RootT | None,
                                        root_field: ModelField,
                                        root_type: TypeForm) -> _RootT:
-        if get_origin(root_type) is Annotated:
-            root_type = remove_annotated_plus_optional_if_present(root_type)
+        # if get_origin(root_type) is Annotated:
+        #     root_type = remove_annotated_plus_optional_if_present(root_type)
 
         if get_origin(root_type) is Union:
             last_error_holder = LastErrorHolder()
@@ -830,8 +875,9 @@ def _parse_with_root_type_if_model(cls,
             return cast(_RootT, value if is_model_instance(value) else root_type.parse_obj(value))
 
         if value is None:
-            none_default = root_field.default_factory() is None if root_field.default_factory \
-                else root_field.default is None
+            default_value = root_field.get_default()
+            none_default = default_value is None or (is_model_instance(default_value)
+                                                     and default_value.contents is None)
             root_type_is_none = is_none_type(root_type)
             root_type_is_optional = get_origin(root_type) is Union \
                 and any(is_none_type(arg) for arg in get_args(root_type))
@@ -915,6 +961,7 @@ def is_nested_type(cls) -> bool:
         return not cls.inner_type(with_args=True) == cls.outer_type(with_args=True)
 
     @classmethod
+    # Refactor: Remove is_param_model
     def is_param_model(cls) -> bool:
         if cls.outer_type() is list:
             type_to_check = cls.inner_type(with_args=True)
@@ -933,7 +980,7 @@ def _get_root_field(cls) -> ModelField:
     def _get_root_type(cls, outer: bool, with_args: bool) -> TypeForm | None:
         root_field = cls._get_root_field()
         root_type = root_field.outer_type_ if outer else root_field.type_
-        root_type = remove_annotated_plus_optional_if_present(root_type)
+        # root_type = remove_annotated_plus_optional_if_present(root_type)
         return root_type if with_args else ensure_plain_type(root_type)
 
     # @classmethod
@@ -1127,7 +1174,7 @@ def _iadd(other):
         try:
             method = cast(Callable, self._getattr_from_contents_obj(name))
         except AttributeError as e:
-            if name in ('__int__', '__bool__', '__float__', '__complex__'):
+            if name in ('__int__', '__float__', '__complex__'):
                 raise ValueError from e
             if name == '__len__':
                 raise TypeError(f"object of type '{self.__class__.__name__}' has no len()")
@@ -1251,49 +1298,49 @@ def _convert_to_model_if_reasonable(  # noqa: C901
         for type_to_check in all_type_variants(outer_type):
             # TODO: Remove inner_type_to_check loop when Annotated hack is removed with
             #       pydantic v2
-            type_to_check = cast(type | GenericAlias,
-                                 remove_annotated_plus_optional_if_present(type_to_check))
-            for inner_type_to_check in all_type_variants(type_to_check):
-                plain_inner_type_to_check = ensure_plain_type(inner_type_to_check)
-                # if plain_inner_type_to_check in (ForwardRef, TypeVar, Literal, None):
-                if plain_inner_type_to_check in (ForwardRef, TypeVar, None):
-                    continue
-
-                if level_up:
-                    inner_type_args = get_args(inner_type_to_check)
-                    if len(inner_type_args) == 0:
-                        inner_type_args = (inner_type_to_check,)
-                    if inner_type_args:
-                        for level_up_type_to_check in all_type_variants(
-                                inner_type_args[level_up_arg_idx]):
-                            level_up_type_to_check = self._fix_tuple_type_from_args(
-                                level_up_type_to_check)
-                            if self._is_instance_or_literal(
-                                    ret,
-                                    ensure_plain_type(level_up_type_to_check),
-                                    level_up_type_to_check,
-                            ):
-                                try:
-                                    return Model[level_up_type_to_check](ret)  # type: ignore
-                                except ValidationError:
-                                    if raise_validation_errors:
-                                        raise
-                                except TypeError:
-                                    pass
+            # type_to_check = cast(type | GenericAlias,
+            #                      remove_annotated_plus_optional_if_present(type_to_check))
+            # for inner_type_to_check in all_type_variants(type_to_check):
+            plain_type_to_check = ensure_plain_type(type_to_check)
+            # if plain_type_to_check in (ForwardRef, TypeVar, Literal, None):
+            if plain_type_to_check in (ForwardRef, TypeVar, None):
+                continue
+
+            if level_up:
+                type_args = get_args(type_to_check)
+                if len(type_args) == 0:
+                    type_args = (type_to_check,)
+                if type_args:
+                    for level_up_type_to_check in all_type_variants(
+                            type_args[level_up_arg_idx]):
+                        level_up_type_to_check = self._fix_tuple_type_from_args(
+                            level_up_type_to_check)
+                        if self._is_instance_or_literal(
+                                ret,
+                                ensure_plain_type(level_up_type_to_check),
+                                level_up_type_to_check,
+                        ):
+                            try:
+                                return Model[level_up_type_to_check](ret)  # type: ignore
+                            except ValidationError:
+                                if raise_validation_errors:
+                                    raise
+                            except TypeError:
+                                pass
 
-                else:
-                    if self._is_instance_or_literal(
-                            ret,
-                            plain_inner_type_to_check,
-                            inner_type_to_check,
-                    ):
-                        try:
-                            return self.__class__(ret)
-                        except ValidationError:
-                            if raise_validation_errors:
-                                raise
-                        except TypeError:
-                            pass
+            else:
+                if self._is_instance_or_literal(
+                        ret,
+                        plain_type_to_check,
+                        type_to_check,
+                ):
+                    try:
+                        return self.__class__(ret)
+                    except ValidationError:
+                        if raise_validation_errors:
+                            raise
+                    except TypeError:
+                        pass
 
         return cast(_ReturnT, ret)
@@ -1387,9 +1434,20 @@ def __repr__(self) -> str:
                 return self._table_repr()
         return self._trad_repr()
 
-    def __hash__(self) -> int:
+    def __bool__(self):
+        if self._get_real_contents():
+            return True
+        else:
+            return False
+
+    def __call__(self, *args: object, **kwargs: object) -> object:
+        if not hasattr(self._get_real_contents(), '__call__'):
+            raise TypeError(f"'{self.__class__.__name__}' object is not callable")
         return self._special_method(
-            '__hash__', MethodInfo(state_changing=False, returns_same_type=YesNoMaybe.NO))
+            '__call__',
+            MethodInfo(state_changing=True, returns_same_type=YesNoMaybe.NO),
+            *args,
+            **kwargs)
 
     def view(self):
         from omnipy.modules.pandas.models import PandasModel
@@ -1478,10 +1536,6 @@ def _is_table():
         return out
 
 
-_ParamRootT = TypeVar('_ParamRootT', default=object | None)
-_KwargValT = TypeVar('_KwargValT', default=object)
-
-
 class DataWithParams(GenericModel, Generic[_ParamRootT, _KwargValT]):
     data: _ParamRootT
     params: dict[str, _KwargValT]
@@ -1539,7 +1593,7 @@ def _validate_and_set_contents_with_params(self, contents: _ParamRootT, **kwargs: _KwargValT) -> None:
         self._validate_and_set_value(DataWithParams(data=contents, params=kwargs))
 
 
-_ParamModelT = TypeVar('_ParamModelT', bound='ParamModel')
+_ParamModelT = TypeVar('_ParamModelT', bound='ParamModel', default='ParamModel')
 
 
 class ListOfParamModel(ParamModel[list[_ParamModelT
diff --git a/src/omnipy/modules/frozen/datasets.py b/src/omnipy/modules/frozen/datasets.py
index e7888119..6f196e57 100644
--- a/src/omnipy/modules/frozen/datasets.py
+++ b/src/omnipy/modules/frozen/datasets.py
@@ -1,22 +1,20 @@
-from typing import Generic, Hashable, TypeVar
+from typing import Generic
 
 from omnipy.data.dataset import Dataset
 from omnipy.modules.frozen.models import (NestedFrozenDictsModel,
                                           NestedFrozenDictsOrTuplesModel,
                                           NestedFrozenTuplesModel)
+from omnipy.modules.frozen.typedefs import KeyT, ValT
 
-_KeyT = TypeVar('_KeyT', bound=Hashable)
-_ScT = TypeVar('_ScT')
 
-
-class NestedFrozenTuplesDataset(Dataset[NestedFrozenTuplesModel[_ScT]], Generic[_ScT]):
+class NestedFrozenDictsOrTuplesDataset(Dataset[NestedFrozenDictsOrTuplesModel[KeyT, ValT]],
+                                       Generic[KeyT, ValT]):
     ...
 
-class NestedFrozenDictsDataset(Dataset[NestedFrozenDictsModel[_KeyT, _ScT]], Generic[_KeyT, _ScT]):
+class NestedFrozenTuplesDataset(Dataset[NestedFrozenTuplesModel[ValT]], Generic[ValT]):
     ...
 
 
-class NestedFrozenDictsOrTuplesDataset(Dataset[NestedFrozenDictsOrTuplesModel[_KeyT, _ScT]],
-                                       Generic[_KeyT, _ScT]):
+class NestedFrozenDictsDataset(Dataset[NestedFrozenDictsModel[KeyT, ValT]], Generic[KeyT, ValT]):
     ...
diff --git a/src/omnipy/modules/frozen/models.py b/src/omnipy/modules/frozen/models.py
index 287a184f..7e1345b2 100644
--- a/src/omnipy/modules/frozen/models.py
+++ b/src/omnipy/modules/frozen/models.py
@@ -4,8 +4,9 @@
 
 from omnipy.data.model import Model
 
+from ...data.helpers import TypeVarStore
 from ..general.models import NotIterableExceptStrOrBytesModel
-from .typedefs import FrozenDict
+from .typedefs import FrozenDict, KeyT, ValT
 
 # TODO: Follow pydantic topic https://github.com/pydantic/pydantic/issues/6868 on MappingProxyType.
 #       Used way too much energy to implement (and test) recursive frozen models, only to discover
@@ -26,16 +27,13 @@
 
 # Basic building block models
 
-_KeyT = TypeVar('_KeyT', default=str | Hashable)
-_ValT = TypeVar('_ValT', default=NotIterableExceptStrOrBytesModel | object)
-
 _FrozenBaseT = TypeVar('_FrozenBaseT', default='_FrozenAnyUnion')
 
 
 # class _FrozenScalarM(NotIterableExceptStrOrBytesModel):
 #     ...
 
 
-class _FrozenScalarM(Model[_ValT], Generic[_ValT]):
+class _FrozenScalarM(Model[ValT], Generic[ValT]):
     _parse_data = NotIterableExceptStrOrBytesModel._parse_data
 
 
@@ -43,42 +41,42 @@ class _FrozenTupleBaseM(Model[tuple[_FrozenBaseT, ...]], Generic[_FrozenBaseT]):
     ...
 
 
-class _FrozenDictBaseM(Model[FrozenDict[_KeyT, _FrozenBaseT]], Generic[_KeyT, _FrozenBaseT]):
+class _FrozenDictBaseM(Model[FrozenDict[KeyT, _FrozenBaseT]], Generic[KeyT, _FrozenBaseT]):
     ...
 
 
-class _FrozenTupleM(_FrozenTupleBaseM['_FrozenAnyUnion'], Generic[_ValT]):
+class _FrozenTupleM(_FrozenTupleBaseM['_FrozenAnyUnion'], Generic[ValT]):
     ...
 
 
-# class _FrozenDictM(_FrozenDictBaseM[_KeyT, '_FrozenAnyUnion'], Generic[_KeyT, _ValT]):
+# class _FrozenDictM(_FrozenDictBaseM[KeyT, '_FrozenAnyUnion'], Generic[KeyT, ValT]):
 #     ...
 
 
-class _FrozenDictM(_FrozenDictBaseM[str | Hashable, '_FrozenAnyUnion'], Generic[_KeyT, _ValT]):
+class _FrozenDictM(_FrozenDictBaseM[str | Hashable, '_FrozenAnyUnion'], Generic[KeyT, ValT]):
     ...
 
 
-class _FrozenNoDictsM(_FrozenTupleBaseM['_FrozenNoDictsUnion'], Generic[_ValT]):
+class _FrozenNoDictsM(_FrozenTupleBaseM['_FrozenNoDictsUnion'], Generic[ValT]):
     ...
 
 
-# class _FrozenNoTuplesM(_FrozenDictBaseM['_KeyT', '_FrozenNoTuplesUnion'], Generic[_KeyT, _ValT]):
+# class _FrozenNoTuplesM(_FrozenDictBaseM['KeyT', '_FrozenNoTuplesUnion'], Generic[KeyT, ValT]):
 #     ...
 
 
 class _FrozenNoTuplesM(_FrozenDictBaseM[str | Hashable, '_FrozenNoTuplesUnion'],
-                       Generic[_KeyT, _ValT]):
+                       Generic[KeyT, ValT]):
     ...
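
Note on the building blocks above: they all wrap FrozenDict, which is defined in typedefs.py further down in this diff as a UserDict whose backing data is swapped for a read-only MappingProxyType at the end of __init__. A minimal sketch of the behaviour these models rely on, assuming omnipy is installed and importable:

from types import MappingProxyType

from omnipy.modules.frozen.typedefs import FrozenDict

fd = FrozenDict({'a': 1})
assert fd['a'] == 1
assert isinstance(fd.data, MappingProxyType)  # swapped in by FrozenDict.__init__
try:
    fd['a'] = 2  # UserDict.__setitem__ writes to the read-only mappingproxy
except TypeError:
    print('FrozenDict cannot be modified after initialisation')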
 # TypeAliases
 
 _FrozenAnyUnion: TypeAlias = \
-    _FrozenScalarM[_ValT] | _FrozenTupleM[_ValT] | _FrozenDictM[_KeyT, _ValT]
-_FrozenNoDictsUnion: TypeAlias = _FrozenScalarM[_ValT] | _FrozenNoDictsM[_ValT]
-_FrozenNoTuplesUnion: TypeAlias = _FrozenScalarM[_ValT] | _FrozenNoTuplesM[_KeyT, _ValT]
+    TypeVarStore[KeyT] | _FrozenScalarM[ValT] | _FrozenTupleM[ValT] | _FrozenDictM[KeyT, ValT]
+_FrozenNoDictsUnion: TypeAlias = _FrozenNoDictsM[ValT] | _FrozenScalarM[ValT]
+_FrozenNoTuplesUnion: TypeAlias = _FrozenNoTuplesM[KeyT, ValT] | _FrozenScalarM[ValT]
 
 # Basic models needs to update their forward_refs with type aliases declared above
@@ -92,7 +90,7 @@ class _FrozenNoTuplesM(_FrozenDictBaseM[str | Hashable, '_FrozenNoTuplesUnion'],
 #
 
 
-class NestedFrozenDictsOrTuplesModel(Model[_FrozenAnyUnion], Generic[_KeyT, _ValT]):
+class NestedFrozenDictsOrTuplesModel(Model[_FrozenAnyUnion], Generic[KeyT, ValT]):
     """
     Recursive model for nested immutable containers (FrozenDict and tuples).
 
     Not functional.
@@ -101,7 +99,7 @@ class NestedFrozenDictsOrTuplesModel(Model[_FrozenAnyUnion], Generic[KeyT, ValT]):
     """
 
 
-class NestedFrozenTuplesModel(Model[_FrozenNoDictsM[_ValT]], Generic[_ValT]):
+class NestedFrozenTuplesModel(Model[_FrozenNoDictsM[ValT]], Generic[ValT]):
     """
     Recursive model for nested tuples.
 
@@ -110,7 +108,7 @@ class NestedFrozenTuplesModel(Model[_FrozenNoDictsM[ValT]], Generic[ValT]):
     """
 
 
-class NestedFrozenDictsModel(Model[_FrozenNoTuplesM[_KeyT, _ValT]], Generic[_KeyT, _ValT]):
+class NestedFrozenDictsModel(Model[_FrozenNoTuplesM[KeyT, ValT]], Generic[KeyT, ValT]):
     """
     Recursive model for nested FrozenDicts.
diff --git a/src/omnipy/modules/frozen/typedefs.py b/src/omnipy/modules/frozen/typedefs.py
index d75815cd..feb568cd 100644
--- a/src/omnipy/modules/frozen/typedefs.py
+++ b/src/omnipy/modules/frozen/typedefs.py
@@ -1,14 +1,18 @@
 from collections import UserDict
 from types import MappingProxyType
-from typing import Generic, Hashable, Sequence, TypeVar
+from typing import Generic, Hashable, Sequence
 
-_KeyT = TypeVar('_KeyT', bound=Hashable)
-_ValT = TypeVar('_ValT')
+from typing_extensions import TypeVar
+
+from omnipy import NotIterableExceptStrOrBytesModel
+
+KeyT = TypeVar('KeyT', default=str | Hashable)
+ValT = TypeVar('ValT', bound=NotIterableExceptStrOrBytesModel | object, default=None)
 
 
 # Unfortunately, MappingProxyType is declared as a final class, which means it cannot be subclassed.
 # Inheriting from UserDict is a workaround.
-class FrozenDict(UserDict[_KeyT, _ValT], Generic[_KeyT, _ValT]):
+class FrozenDict(UserDict[KeyT, ValT], Generic[KeyT, ValT]):
     """
     FrozenDict works exactly like a dict except that it cannot be modified after initialisation:
 
@@ -50,11 +54,11 @@ class calls the type argument without parameters to determine the default value
     NestedFrozenDictsModel, NestedTuplesModel and NestedFrozenCollectionsModel.
     """
     def __init__(self,
-                 unfrozen_dict: dict[_KeyT, _ValT] | Sequence[tuple[_KeyT, _ValT]] | None = None,
+                 unfrozen_dict: dict[KeyT, ValT] | Sequence[tuple[KeyT, ValT]] | None = None,
                  /,
                  **kwargs):
         super().__init__(unfrozen_dict, **kwargs)
-        self.data: MappingProxyType[_KeyT, _ValT] = MappingProxyType(self.data)  # type: ignore
+        self.data: MappingProxyType[KeyT, ValT] = MappingProxyType(self.data)  # type: ignore
 
     def __repr__(self):
         return f"{self.__class__.__name__}({self.data if hasattr(self, 'data') else ''})"
diff --git a/src/omnipy/modules/general/models.py b/src/omnipy/modules/general/models.py
index 9ceca288..20c3e192 100644
--- a/src/omnipy/modules/general/models.py
+++ b/src/omnipy/modules/general/models.py
@@ -1,9 +1,10 @@
-from typing import Generic, get_args, Hashable, TypeAlias
+from typing import Any, Generic, get_args, Hashable, TypeAlias
 
+from pydantic.utils import lenient_isinstance
 from typing_extensions import TypeVar
 
-from omnipy.data.helpers import TypeVarStore
-from omnipy.data.model import Model
+from omnipy.data.helpers import TypeVarStore1, TypeVarStore2, TypeVarStore3, TypeVarStore4
+from omnipy.data.model import is_model_instance, Model
 from omnipy.util.helpers import is_iterable
 
@@ -30,8 +31,12 @@ class NotIterableExceptStrOrBytesModel(Model[object | None]):
     """
     @classmethod
     def _parse_data(cls, data: object) -> object:
+        if isinstance(data, NotIterableExceptStrOrBytesModel):
+            return data
+
         assert isinstance(data, str) or isinstance(data, bytes) or not is_iterable(data), \
             f'Data of type {type(data)} is iterable'
+
         return data
 
@@ -51,39 +56,46 @@ def _parse_data(cls, data: object) -> object:
 
 class ChainMixin:
     @classmethod
-    def _parse_data(cls, data) -> object:
-        stores = get_args(cls.outer_type(with_args=True))[:-1]
-        for store in stores:
-            model = get_args(store)[0]
+    def _parse_data(cls, data: object) -> object:
+        type_args = get_args(cls.outer_type(with_args=True))
+        store_models = [get_args(store)[0] for store in type_args[1:-1]]
+        all_models = [type_args[0]] + store_models + [type_args[-1]]
+        all_models.reverse()
+
+        if isinstance(data, all_models[-1]):
+            return data
+
+        assert isinstance(data, all_models[0]), \
+            f'Expected data of type {all_models[0]}, got {type(data)}'
+
+        for model in all_models[1:]:
             data = model(data)
         return data
 
 
-class Chain2(ChainMixin, Model[TypeVarStore[U] | TypeVarStore[V] | object], Generic[U, V]):
+class Chain2(ChainMixin, Model[V | U], Generic[U, V]):
     ...
 
 
-class Chain3(ChainMixin,
-             Model[TypeVarStore[U] | TypeVarStore[V] | TypeVarStore[W] | object],
-             Generic[U, V, W]):
+class Chain3(ChainMixin, Model[W | TypeVarStore1[V] | U], Generic[U, V, W]):
     ...
 
 
-class Chain4(ChainMixin,
-             Model[TypeVarStore[U] | TypeVarStore[V] | TypeVarStore[W] | TypeVarStore[X] | object],
-             Generic[U, V, W, X]):
+class Chain4(ChainMixin, Model[X | TypeVarStore2[W] | TypeVarStore1[V] | U], Generic[U, V, W, X]):
     ...
 
 
-class Chain5(ChainMixin,
-             Model[TypeVarStore[U] | TypeVarStore[V] | TypeVarStore[W] | TypeVarStore[X]
-                   | TypeVarStore[Y] | object],
-             Generic[U, V, W, X, Y]):
+class Chain5(
+        ChainMixin,
+        Model[Y | TypeVarStore3[X] | TypeVarStore2[W] | TypeVarStore1[V] | U],
+        Generic[U, V, W, X, Y],
+):
     ...
 
 
-class Chain6(ChainMixin,
-             Model[TypeVarStore[U] | TypeVarStore[V] | TypeVarStore[W] | TypeVarStore[X]
-                   | TypeVarStore[Y] | TypeVarStore[Z] | object],
-             Generic[U, V, W, X, Y, Z]):
+class Chain6(
+        ChainMixin,
+        Model[Z | TypeVarStore4[Y] | TypeVarStore3[X] | TypeVarStore2[W] | TypeVarStore1[V] | U],
+        Generic[U, V, W, X, Y, Z],
+):
     ...
diff --git a/src/omnipy/modules/json/datasets.py b/src/omnipy/modules/json/datasets.py
index d4f9114c..7822d828 100644
--- a/src/omnipy/modules/json/datasets.py
+++ b/src/omnipy/modules/json/datasets.py
@@ -1,4 +1,6 @@
-from typing import Generic, TypeVar
+from typing import Generic
+
+from typing_extensions import TypeVar
 
 from omnipy.data.dataset import Dataset
 from omnipy.data.model import Model
@@ -29,7 +31,7 @@
 # TODO: call omnipy modules something else than modules, to distinguish from Python modules.
 #       Perhaps plugins?
 #
-_JsonModelT = TypeVar('_JsonModelT', bound=Model)
+_JsonModelT = TypeVar('_JsonModelT', bound=Model, default=JsonModel)
 
 
 class _JsonBaseDataset(Dataset[_JsonModelT], Generic[_JsonModelT]):
diff --git a/src/omnipy/modules/prefect/engine/prefect.py b/src/omnipy/modules/prefect/engine/prefect.py
index cf113dc3..803612d0 100644
--- a/src/omnipy/modules/prefect/engine/prefect.py
+++ b/src/omnipy/modules/prefect/engine/prefect.py
@@ -84,6 +84,7 @@ def task_flow(*inner_args, **inner_kwargs):
     # LinearFlowRunnerEngine
     def _init_linear_flow(self, linear_flow: IsLinearFlow) -> Any:
         assert isinstance(self._config, PrefectEngineConfig)
+        # flow_kwargs = dict(name=linear_flow.name, persist_result=True, result_storage='S3/minio-s3')
         flow_kwargs = dict(name=linear_flow.name,)
         call_func = self.default_linear_flow_run_decorator(linear_flow)
diff --git a/src/omnipy/modules/prefect/settings/logging.yml b/src/omnipy/modules/prefect/settings/logging.yml
index fd95625a..abd789b2 100644
--- a/src/omnipy/modules/prefect/settings/logging.yml
+++ b/src/omnipy/modules/prefect/settings/logging.yml
@@ -14,15 +14,14 @@ formatters:
         datefmt: "%H:%M:%S"
 
     standard:
+        (): prefect.logging.formatters.PrefectFormatter
         format: "%(asctime)s.%(msecs)03d | %(levelname)-7s | %(name)s - %(message)s"
+        flow_run_fmt: "%(asctime)s.%(msecs)03d | %(levelname)-7s | Flow run %(flow_run_name)r - %(message)s"
+        task_run_fmt: "%(asctime)s.%(msecs)03d | %(levelname)-7s | Task run %(task_run_name)r - %(message)s"
         datefmt: "%H:%M:%S"
 
-    flow_runs:
-        format: "%(asctime)s.%(msecs)03d | %(levelname)-7s | Flow run %(flow_run_name)r - %(message)s"
-        datefmt: "%H:%M:%S"
-
-    task_runs:
-        format: "%(asctime)s.%(msecs)03d | %(levelname)-7s | Task run %(task_run_name)r - %(message)s"
+    debug:
+        format: "%(asctime)s.%(msecs)03d | %(levelname)-7s | %(threadName)-12s | %(name)s - %(message)s"
         datefmt: "%H:%M:%S"
 
     json:
@@ -42,68 +41,74 @@ handlers:
 
     console:
         level: 0
-        class: logging.StreamHandler
+        class: prefect.logging.handlers.PrefectConsoleHandler
         formatter: standard
+        styles:
+            log.web_url: bright_blue
+            log.local_url: bright_blue
 
-    console_flow_runs:
-        level: 0
-        class: logging.StreamHandler
-        formatter: flow_runs
+            log.info_level: cyan
+            log.warning_level: yellow3
+            log.error_level: red3
+            log.critical_level: bright_red
 
-    console_task_runs:
-        level: 0
-        class: logging.StreamHandler
-        formatter: task_runs
+            log.completed_state: green
+            log.cancelled_state: yellow3
+            log.failed_state: red3
+            log.crashed_state: bright_red
+
+            log.flow_run_name: magenta
+            log.flow_name: bold magenta
 
-    orion:
+#    api:
+#        level: 0
+#        class: prefect.logging.handlers.APILogHandler
+
+    debug:
         level: 0
-        class: prefect.logging.handlers.OrionHandler
+        class: logging.StreamHandler
+        formatter: debug
 
 loggers:
     prefect:
         level: "${PREFECT_LOGGING_LEVEL}"
-#        handlers: [console]
-        handlers: []
-#        propagate: no
-        propagate: yes
 
     prefect.extra:
         level: "${PREFECT_LOGGING_LEVEL}"
-#        handlers: [orion, console]
-        handlers: [orion]
-#        propagate: no
-        propagate: yes
+        handlers: [api]
 
     prefect.flow_runs:
         level: NOTSET
-#        handlers: [orion, console_flow_runs]
-        handlers: [orion]
-#        propagate: no
-        propagate: yes
+        handlers: [api]
 
     prefect.task_runs:
         level: NOTSET
-#        handlers: [orion, console_task_runs]
-        handlers: [orion]
-#        propagate: no
-        propagate: yes
+        handlers: [api]
 
-    prefect.orion:
+    prefect.server:
         level: "${PREFECT_LOGGING_SERVER_LEVEL}"
 
+    prefect.client:
+        level: "${PREFECT_LOGGING_LEVEL}"
+
+    prefect.infrastructure:
+        level: "${PREFECT_LOGGING_LEVEL}"
+
+    prefect._internal:
+        level: "${PREFECT_LOGGING_INTERNAL_LEVEL}"
+        propagate: false
+        handlers: [debug]
+
     uvicorn:
         level: "${PREFECT_LOGGING_SERVER_LEVEL}"
-        handlers: [console]
-        propagate: no
 
     fastapi:
         level: "${PREFECT_LOGGING_SERVER_LEVEL}"
-        handlers: [console]
-        propagate: no
 
 ## The root logger: any logger without propagation disabled sends to here as well
 #root:
 #    # By default, we display warning level logs from any library in the console
 #    # to match Python's default behavior while formatting logs nicely
 #    level: WARNING
-#    handlers: [console]
+#    handlers: [console]
\ No newline at end of file
diff --git a/src/omnipy/modules/raw/tasks.py b/src/omnipy/modules/raw/tasks.py
index a31bb744..b0ff99db 100644
--- a/src/omnipy/modules/raw/tasks.py
+++ b/src/omnipy/modules/raw/tasks.py
@@ -1,17 +1,19 @@
+from collections import deque
 from copy import deepcopy
 from functools import reduce
 from io import StringIO
 from itertools import chain
 from operator import add, ior
 import os
-from typing import TypeVar
 
 from chardet import UniversalDetector
+from typing_extensions import TypeVar
 
 from omnipy.compute.task import TaskTemplate
 from omnipy.compute.typing import mypy_fix_task_template
 from omnipy.data.dataset import Dataset, Model
 
+from ...util.setdeque import SetDeque
 from .datasets import StrDataset
 from .protocols import IsModifyAllLinesCallable, IsModifyContentsCallable, IsModifyEachLineCallable
 
@@ -82,18 +84,22 @@ def modify_all_lines(
     return os.linesep.join(modified_lines)
 
 
-_ModelT = TypeVar('_ModelT', bound=Model)
+_SequenceModelT = TypeVar(
+    '_SequenceModelT', bound=Model, default=Model[str | bytes | list | tuple | deque])
 
 
 @mypy_fix_task_template
 @TaskTemplate()
-def concat_all(dataset: Dataset[_ModelT]) -> _ModelT:
+def concat_all(dataset: Dataset[_SequenceModelT]) -> _SequenceModelT:
     return reduce(add, (val for val in dataset.values()))
 
 
+_UniqueModelT = TypeVar('_UniqueModelT', bound=Model, default=Model[dict | set | SetDeque])
+
+
 @mypy_fix_task_template
 @TaskTemplate()
-def union_all(dataset: Dataset[_ModelT]) -> _ModelT:
+def union_all(dataset: Dataset[_UniqueModelT]) -> _UniqueModelT:
     all_vals = tuple(val for val in dataset.values())
     assert len(all_vals) > 0
     first_val = deepcopy(all_vals[0])
diff --git a/src/omnipy/modules/tables/models.py b/src/omnipy/modules/tables/models.py
index 7920d35e..fbb2d4e4 100644
--- a/src/omnipy/modules/tables/models.py
+++ b/src/omnipy/modules/tables/models.py
@@ -58,13 +58,14 @@ def col_names(self) -> tuple[str]:
         return tuple(col_names.keys())
 
 
-_PydanticModelT = TypeVar('_PydanticModelT', bound=BaseModel)
+_PydanticBaseModelT = TypeVar('_PydanticBaseModelT', bound=BaseModel)
+_PydanticRecordT = TypeVar('_PydanticRecordT', bound=BaseModel)
 
 
-class PydanticRecordModel(Model[_PydanticModelT | JsonListOfScalarsModel],
-                          Generic[_PydanticModelT]):
+class PydanticRecordModel(Model[_PydanticBaseModelT | JsonListOfScalarsModel],
+                          Generic[_PydanticBaseModelT]):
     @classmethod
-    def _parse_data(cls, data: _PydanticModelT | JsonListOfScalarsModel) -> _PydanticModelT:
+    def _parse_data(cls, data: _PydanticBaseModelT | JsonListOfScalarsModel) -> _PydanticBaseModelT:
         match data:
             case JsonListOfScalarsModel():
                 pydantic_model = get_args(cls.outer_type(with_args=True))[0]
@@ -75,10 +76,8 @@ def _parse_data(cls, data: _PydanticBaseModelT | JsonListOfScalarsModel) -> _PydanticBaseModelT:
         return data
 
 
-class TableOfPydanticRecordsModel(
-        Model[list[PydanticRecordModel[_PydanticModelT]]
-              | Chain3[SplitToLinesModel,
-                       SplitLinesToColumnsModelNew,
-                       Model[list[PydanticRecordModel[_PydanticModelT]]]]],
-        Generic[_PydanticModelT]):
+class TableOfPydanticRecordsModel(Chain3[SplitToLinesModel,
+                                         SplitLinesToColumnsModelNew,
+                                         Model[list[PydanticRecordModel[_PydanticRecordT]]]],
+                                  Generic[_PydanticRecordT]):
     ...
diff --git a/src/omnipy/util/helpers.py b/src/omnipy/util/helpers.py
index 2f4a3b3d..68059913 100644
--- a/src/omnipy/util/helpers.py
+++ b/src/omnipy/util/helpers.py
@@ -186,7 +186,7 @@ def evaluate_any_forward_refs_if_possible(in_type: TypeForm,
 
 def get_default_if_typevar(typ_: type[_ObjT] | TypeForm | TypeVar) -> type[_ObjT] | TypeForm:
     if isinstance(typ_, TypeVar):
-        if hasattr(typ_, '__default__') and typ_.__default__ is not None:
+        if hasattr(typ_, '__default__'):
            return typ_.__default__
         else:
             raise TypeError(f'The TypeVar "{typ_.__name__}" needs to specify a default value. '
@@ -377,6 +377,7 @@ def get_deepcopy_object_ids(self) -> SetDeque[int]:
         return SetDeque(self._sub_obj_ids.keys())
 
     def setup_deepcopy(self, obj):
+        print(f'setup_deepcopy({obj})')
         assert self._cur_deepcopy_obj_id is None, \
             f'self._cur_deepcopy_obj_id is not None, but {self._cur_deepcopy_obj_id}'
         assert len(self._cur_keep_alive_list) == 0, \
diff --git a/tests/data/test_model.py b/tests/data/test_model.py
index e6e11482..b03cdc61 100644
--- a/tests/data/test_model.py
+++ b/tests/data/test_model.py
@@ -1,4 +1,4 @@
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
 import gc
 from math import floor
 import os
@@ -6,7 +6,6 @@
 from types import MappingProxyType, MethodType, NotImplementedType
 from typing import (Annotated,
                     Any,
-                    Callable,
                     cast,
                     ForwardRef,
                     Generic,
@@ -235,13 +234,13 @@ def test_get_inner_outer_type() -> None:
     assert dict_of_strings_to_list_of_ints_model.inner_type() == list
     assert dict_of_strings_to_list_of_ints_model.inner_type(with_args=True) == list[int]
     assert dict_of_strings_to_list_of_ints_model.is_nested_type() is True
-
-    fake_optional_model = Model[Annotated[Optional[dict[str, list[int]]], 'someone else']]()
-    assert fake_optional_model.outer_type() == dict
-    assert fake_optional_model.outer_type(with_args=True) == dict[str, list[int]]
-    assert fake_optional_model.inner_type() == list
-    assert fake_optional_model.inner_type(with_args=True) == list[int]
-    assert fake_optional_model.is_nested_type() is True
+    #
+    # fake_optional_model = Model[Annotated[Optional[dict[str, list[int]]], 'someone else']]()
+    # assert fake_optional_model.outer_type() == dict
+    # assert fake_optional_model.outer_type(with_args=True) == dict[str, list[int]]
+    # assert fake_optional_model.inner_type() == list
+    # assert fake_optional_model.inner_type(with_args=True) == list[int]
+    # assert fake_optional_model.is_nested_type() is True
 
 
 def test_equality_other_models() -> None:
@@ -747,10 +746,11 @@ class FirstTypeNotInstantiatableUnionModel(Model[Any | str]):
 
     assert FirstTypeNotInstantiatableUnionModel().to_data() == ''
 
-    with pytest.raises(TypeError):
+    class NoTypeInstantiatableUnionModel(Model[Any | Type]):
+        ...
-        class NoTypeInstantiatableUnionModel(Model[Any | Type]):
-            ...
+    with pytest.raises(TypeError):
+        NoTypeInstantiatableUnionModel()
 
 
 def test_union_default_value_if_any_none() -> None:
@@ -1129,7 +1129,7 @@ class GenericListModel(Model[list[BaseT]], Generic[BaseT]):
     class ListModel(GenericListModel['FullModel']):
         ...
 
-    FullModel: TypeAlias = Union[ListModel, MaybeNumberModel]
+    FullModel: TypeAlias = Union[MaybeNumberModel, ListModel]
 
     ListModel.update_forward_refs(FullModel=FullModel)
 
@@ -1752,6 +1752,7 @@ class SimplePydanticModel(BaseModel):
     model = Model[SimplePydanticModel](SimplePydanticModel(value=[123]))  # type: ignore[arg-type]
 
     _assert_no_snapshot(model)
+    _assert_no_snapshot(model.contents.value)
 
     # Just accessing a field of a pydantic model through __getattr__ is enough to trigger a snapshot
     # of the parent
@@ -2206,6 +2207,32 @@ def test_mimic_simple_list_operator_with_auto_convert(
         'abc' + model  # type: ignore[operator]
 
 
+def test_mimic_hash_method():
+    hashable_model = Model[str]('Hello World!')
+    assert hash(hashable_model) != 0
+
+    unhashable_model = Model[list[int]]()
+    with pytest.raises(TypeError):
+        hash(unhashable_model)
+
+
+def test_mimic_call_method():
+    callable_model = Model[Callable](lambda x: x + 1)
+
+    assert callable_model(1) == 2
+
+    class MyClass:
+        ...
+
+    not_callable_class_model = Model[MyClass]()
+    with pytest.raises(TypeError):
+        not_callable_class_model(1)
+
+    not_callable_builtin_model = Model[int]()
+    with pytest.raises(TypeError):
+        not_callable_builtin_model(1)
+
+
 def test_mimic_sequence_convert_for_concat(
     runtime: Annotated[IsRuntime, pytest.fixture],
     skip_test_if_dynamically_convert_elements_to_models: Annotated[None, pytest.fixture],
@@ -2390,8 +2417,6 @@ def test_mimic_concatenation_for_converted_models(
 
 def test_mimic_concatenation_for_converted_models_with_incompatible_contents_except_to_data(
-    runtime: Annotated[IsRuntime, pytest.fixture],
-    assert_model_if_dyn_conv_else_val: Annotated[AssertModelOrValFunc, pytest.fixture],
 ) -> None:
     class MyList(Generic[T]):
         def __init__(self, *args: T):
@@ -2716,6 +2741,7 @@ def test_mimic_concat_less_than_five_model_add_variants_with_other_type_in_and_i
 def test_mimic_concat_all_less_than_five_model_add_variants_with_unsupported_input(
     all_add_variants: Annotated[tuple[bool, bool, bool, bool, bool], pytest.fixture],
     all_less_than_five_model_add_variants: Annotated[Model[MyNumberBase], pytest.fixture],
+    skip_test_if_dynamically_convert_elements_to_models: Annotated[None, pytest.fixture],
 ):
     has_add, has_radd, has_iadd, other_type_in, other_type_out = all_add_variants
     less_than_five_model = all_less_than_five_model_add_variants
@@ -3789,8 +3815,6 @@ def test_parametrized_model_new() -> None:
     # assert ParamUpperStrModel().is_param_model()
     assert ParamUpperStrModel('foo').contents == 'foo'
 
-    asd = ParamUpperStrModel.adjust
-    # reveal_type(asd)
     MyUpperStrModel = ParamUpperStrModel.adjust('MyUpperStrModel', upper=True)
 
     assert MyUpperStrModel('bar').contents == 'BAR'
diff --git a/tests/integration/novel/full/helpers/models.py b/tests/integration/novel/full/helpers/models.py
index fafde829..2a48460b 100644
--- a/tests/integration/novel/full/helpers/models.py
+++ b/tests/integration/novel/full/helpers/models.py
@@ -69,6 +69,8 @@ def record_schema_factory(data_file: str,
         class Config(BaseConfig):
             extra = Extra.forbid
 
+    # Force config.dynamically_convert... is False
+
     return create_model(
         data_file,
         __base__=RecordSchemaBase,
diff --git a/tests/integration/novel/full/test_multi_model_dataset.py b/tests/integration/novel/full/test_multi_model_dataset.py
index d0481b15..6080a5ff 100644
--- a/tests/integration/novel/full/test_multi_model_dataset.py
+++ b/tests/integration/novel/full/test_multi_model_dataset.py
@@ -110,6 +110,7 @@ def test_specialize_record_models_signature_and_return_type_func(
 @pc.parametrize_with_cases('case', cases='.cases.flows', has_tag='specialize_record_models')
 def test_run_specialize_record_models_consistent_types(
         runtime_all_engines: Annotated[None, pytest.fixture],  # noqa
+        skip_test_if_dynamically_convert_elements_to_models,
         case: FlowCase):
     specialize_record_models = case.flow_template.apply()
diff --git a/tests/modules/frozen/cases/frozen_data.py b/tests/modules/frozen/cases/frozen_data.py
index 0411f5e5..78989c20 100644
--- a/tests/modules/frozen/cases/frozen_data.py
+++ b/tests/modules/frozen/cases/frozen_data.py
@@ -159,10 +159,12 @@ class FrozenDictsOrTuplesDataPoints:
     return CaseInfo(
         name='test_frozen_dicts_or_tuples',
         prefix2model_classes={
-            'ftd': (NestedFrozenDictsOrTuplesModel, NestedFrozenDictsOrTuplesModel[FSK, FSV])
+            # 'ftd': (NestedFrozenDictsOrTuplesModel, NestedFrozenDictsOrTuplesModel[FSK, FSV])
+            'ftd': (NestedFrozenDictsOrTuplesModel[FSK, FSV],)
         },
         prefix2dataset_classes={
-            'ftd': (NestedFrozenDictsOrTuplesDataset, NestedFrozenDictsOrTuplesDataset[FSK, FSV])
+            # 'ftd': (NestedFrozenDictsOrTuplesDataset, NestedFrozenDictsOrTuplesDataset[FSK, FSV])
+            'ftd': (NestedFrozenDictsOrTuplesDataset[FSK, FSV],)
         },
         data_points=FrozenDictsOrTuplesDataPoints(),
     )
diff --git a/tests/modules/frozen/test_models.py b/tests/modules/frozen/test_models.py
index 1d4b08e8..27538e88 100644
--- a/tests/modules/frozen/test_models.py
+++ b/tests/modules/frozen/test_models.py
@@ -5,6 +5,7 @@
 import pytest_cases as pc
 
 from omnipy.data.model import Model
+from omnipy.modules.frozen.models import NestedFrozenDictsOrTuplesModel
 from omnipy.modules.frozen.typedefs import FrozenDict
 
 from ..helpers.classes import CaseInfo
@@ -36,6 +37,8 @@ class FrozenDictOfInt2NoneModel(Model[FrozenDict[int, NoneModel]]):
 
 @pc.parametrize_with_cases('case', cases='.cases.frozen_data')
 def test_nested_frozen_models(case: CaseInfo) -> None:
+    # NestedFrozenDictsOrTuplesModel[str, None | int](None)
+
     for field in fields(case.data_points):
         name = field.name
         for model_cls in case.model_classes_for_data_point(name):
@@ -51,6 +54,7 @@ def test_nested_frozen_models(case: CaseInfo) -> None:
                     model_cls(data)
                 # print(f'Error: {e}')
             else:
+                print(data)
                 model_obj = model_cls(data)
 
                 # print(f'repr(model_obj): {repr(model_obj)}')
diff --git a/tests/modules/general/test_models.py b/tests/modules/general/test_models.py
index 86492826..7630c920 100644
--- a/tests/modules/general/test_models.py
+++ b/tests/modules/general/test_models.py
@@ -1,7 +1,14 @@
 from pydantic import ValidationError
 import pytest
 
-from omnipy.modules.general.models import NotIterableExceptStrOrBytesModel
+from omnipy.modules.general.models import (Chain2,
+                                           Chain3,
+                                           Chain4,
+                                           Chain5,
+                                           Chain6,
+                                           NotIterableExceptStrOrBytesModel)
+
+from .helpers.models import MyList, MyListModel, RotateOneCharModel, SplitCharsModel
 
 
 def test_not_iterable_except_str_model():
@@ -24,3 +31,90 @@ def test_not_iterable_except_str_model():
 
     assert NotIterableExceptStrOrBytesModel('1234').contents == '1234'
     assert NotIterableExceptStrOrBytesModel('æøå'.encode('utf8')).contents == 'æøå'.encode('utf8')
+
+
+def test_chain2_model():
+
+    MyListModel(MyList(['a', 'b', 'c']))
+
+    class SplitCharsToMyListModel(Chain2[
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    model = SplitCharsToMyListModel('abc')
+    assert model.contents == MyListModel(MyList('a', 'b', 'c'))
+    assert model.to_data() == ['a', 'b', 'c']
+
+
+def test_concat_chain2_model_with_to_data_conversion():
+    class SplitCharsToMyListModel(Chain2[
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    stream = SplitCharsModel('abc') + SplitCharsToMyListModel('def')
+    assert stream.to_data() == ['a', 'b', 'c', 'd', 'e', 'f']
+
+    stream = SplitCharsToMyListModel('abc') + SplitCharsModel('def')
+    assert stream.to_data() == ['a', 'b', 'c', 'd', 'e', 'f']
+
+
+def test_chain3_model():
+    class RotateOneAndSplitCharsToMyListModel(Chain3[
+            RotateOneCharModel,
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    model = RotateOneAndSplitCharsToMyListModel('abcdefg')
+    assert model.contents == MyListModel(MyList('b', 'c', 'd', 'e', 'f', 'g', 'a'))
+    assert model.to_data() == ['b', 'c', 'd', 'e', 'f', 'g', 'a']
+
+
+def test_chain4_model():
+    class RotateTwoAndSplitCharsToMyListModel(Chain4[
+            RotateOneCharModel,
+            RotateOneCharModel,
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    model = RotateTwoAndSplitCharsToMyListModel('abcdefg')
+    assert model.contents == MyListModel(MyList('c', 'd', 'e', 'f', 'g', 'a', 'b'))
+    assert model.to_data() == ['c', 'd', 'e', 'f', 'g', 'a', 'b']
+
+
+def test_chain5_model():
+    class RotateThreeAndSplitCharsToMyListModel(Chain5[
+            RotateOneCharModel,
+            RotateOneCharModel,
+            RotateOneCharModel,
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    model = RotateThreeAndSplitCharsToMyListModel('abcdefg')
+    assert model.contents == MyListModel(MyList('d', 'e', 'f', 'g', 'a', 'b', 'c'))
+    assert model.to_data() == ['d', 'e', 'f', 'g', 'a', 'b', 'c']
+
+
+def test_chain6_model():
+    class RotateFourAndSplitCharsToMyListModel(Chain6[
+            RotateOneCharModel,
+            RotateOneCharModel,
+            RotateOneCharModel,
+            RotateOneCharModel,
+            SplitCharsModel,
+            MyListModel,
+    ]):
+        ...
+
+    model = RotateFourAndSplitCharsToMyListModel('abcdefg')
+    assert model.contents == MyListModel(MyList('e', 'f', 'g', 'a', 'b', 'c', 'd'))
+    assert model.to_data() == ['e', 'f', 'g', 'a', 'b', 'c', 'd']
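
A closing note on the serialize.py change at the top of this diff: wrapping the ascending sort in reversed() makes the generator yield the newest date directory first, matching the behaviour promised by the name _all_job_output_file_paths_in_reverse_order_for_last_run. A quick, self-contained sanity check of that ordering logic (hypothetical directory names, standard library only):

import os
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    for date_dir in ('2023-01-01', '2024-02-29', '2023-06-15'):
        Path(tmp, date_dir).mkdir()

    sorted_date_dirs = iter(reversed(sorted(os.listdir(tmp))))
    assert next(sorted_date_dirs) == '2024-02-29'  # the most recent run is visited first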