diff --git a/mega_change.patch b/mega_change.patch new file mode 100644 index 00000000..150ab095 --- /dev/null +++ b/mega_change.patch @@ -0,0 +1,1015 @@ +diff --git a/src/protean/adapters/repository/elasticsearch.py b/src/protean/adapters/repository/elasticsearch.py +index 54d0fff..19a4b93 100644 +--- a/src/protean/adapters/repository/elasticsearch.py ++++ b/src/protean/adapters/repository/elasticsearch.py +@@ -12,6 +12,7 @@ from elasticsearch import Elasticsearch + from elasticsearch.exceptions import NotFoundError + from elasticsearch_dsl import Document, Index, Keyword, Mapping, Search, query + ++from protean.container import Options + from protean.exceptions import ObjectNotFoundError + from protean.fields import Reference + from protean.globals import current_domain +@@ -423,9 +424,7 @@ class ESProvider(BaseProvider): + if entity_cls.meta_.schema_name in self._model_classes: + model_cls = self._model_classes[entity_cls.meta_.schema_name] + else: +- from protean.core.model import ModelMeta +- +- meta_ = ModelMeta() ++ meta_ = Options() + meta_.entity_cls = entity_cls + + # Construct Inner Index class with options +diff --git a/src/protean/adapters/repository/memory.py b/src/protean/adapters/repository/memory.py +index f40b9b7..6f7a45f 100644 +--- a/src/protean/adapters/repository/memory.py ++++ b/src/protean/adapters/repository/memory.py +@@ -11,6 +11,7 @@ from threading import Lock + from typing import Any + from uuid import UUID + ++from protean.container import Options + from protean.core.model import BaseModel + from protean.exceptions import ObjectNotFoundError, ValidationError + from protean.fields.basic import Auto +@@ -148,12 +149,10 @@ class MemoryProvider(BaseProvider): + if key not in ["Meta", "__module__", "__doc__", "__weakref__"] + } + +- from protean.core.model import ModelMeta ++ meta_nested_class = type("Meta", (object,), {"entity_cls": entity_cls}) ++ meta_nested_class.__qualname__ = f"{entity_cls.__name__}Model.Meta" ++ custom_attrs.update({"Meta": meta_nested_class}) + +- meta_ = ModelMeta() +- meta_.entity_cls = entity_cls +- +- custom_attrs.update({"meta_": meta_}) + # FIXME Ensure the custom model attributes are constructed properly + decorated_model_cls = type( + model_cls.__name__, (MemoryModel, model_cls), custom_attrs +@@ -172,13 +171,11 @@ class MemoryProvider(BaseProvider): + if entity_cls.meta_.schema_name in self._model_classes: + model_cls = self._model_classes[entity_cls.meta_.schema_name] + else: +- from protean.core.model import ModelMeta +- +- meta_ = ModelMeta() +- meta_.entity_cls = entity_cls ++ meta_nested_class = type("Meta", (object,), {"entity_cls": entity_cls}) ++ meta_nested_class.__qualname__ = f"{entity_cls.__name__}Model.Meta" + + attrs = { +- "meta_": meta_, ++ "Meta": meta_nested_class, + } + # FIXME Ensure the custom model attributes are constructed properly + model_cls = type(entity_cls.__name__ + "Model", (MemoryModel,), attrs) +diff --git a/src/protean/adapters/repository/sqlalchemy.py b/src/protean/adapters/repository/sqlalchemy.py +index 867deb5..4fb548d 100644 +--- a/src/protean/adapters/repository/sqlalchemy.py ++++ b/src/protean/adapters/repository/sqlalchemy.py +@@ -5,20 +5,19 @@ import json + import logging + import uuid + +-from abc import ABCMeta + from enum import Enum + from typing import Any + + import sqlalchemy.dialects.postgresql as psql + +-from sqlalchemy import Column, MetaData, and_, create_engine, or_, orm ++from sqlalchemy import Column, MetaData, and_, create_engine, or_, orm, text + from sqlalchemy import 
types as sa_types + from sqlalchemy.engine.url import make_url + from sqlalchemy.exc import DatabaseError +-from sqlalchemy.ext import declarative as sa_dec +-from sqlalchemy.ext.declarative import as_declarative, declared_attr ++from sqlalchemy.orm import DeclarativeBase, declared_attr + from sqlalchemy.types import CHAR, TypeDecorator + ++from protean.container import Options + from protean.core.value_object import BaseValueObject + from protean.core.model import BaseModel + from protean.exceptions import ( +@@ -134,10 +133,21 @@ def _custom_json_dumps(value): + return json.dumps(value, default=_default) + + +-class DeclarativeMeta(sa_dec.DeclarativeMeta, ABCMeta): +- """Metaclass for the Sqlalchemy declarative schema""" ++def derive_schema_name(model_cls): ++ # Retain schema name if already present, otherwise derive from entity class ++ if ( ++ hasattr(model_cls.meta_, "schema_name") ++ and model_cls.meta_.schema_name is not None ++ ): ++ return model_cls.meta_.schema_name ++ else: ++ return model_cls.meta_.entity_cls.meta_.schema_name ++ ++ ++class SqlalchemyModel(DeclarativeBase, BaseModel): ++ """Model representation for the Sqlalchemy Database""" + +- def __init__(cls, classname, bases, dict_): # noqa: C901 ++ def __init_subclass__(subclass, **kwargs): # noqa: C901 + field_mapping = { + Boolean: sa_types.Boolean, + Date: sa_types.Date, +@@ -166,13 +176,13 @@ class DeclarativeMeta(sa_dec.DeclarativeMeta, ABCMeta): + return field_mapping.get(field_cls) + + # Update the class attrs with the entity attributes +- if "meta_" in dict_: +- entity_cls = dict_["meta_"].entity_cls ++ if "meta_" in subclass.__dict__: ++ entity_cls = subclass.__dict__["meta_"].entity_cls + for _, field_obj in attributes(entity_cls).items(): + attribute_name = field_obj.attribute_name + + # Map the field if not in attributes +- if attribute_name not in cls.__dict__: ++ if attribute_name not in subclass.__dict__: + # Derive field based on field enclosed within ShadowField + if isinstance(field_obj, _ShadowField): + field_obj = field_obj.field_obj +@@ -185,7 +195,7 @@ class DeclarativeMeta(sa_dec.DeclarativeMeta, ABCMeta): + sa_type_cls = field_mapping_for(field_obj) + + # Upgrade to Postgresql specific Data Types +- if cls.metadata.bind.dialect.name == "postgresql": ++ if subclass.__dict__["engine"].dialect.name == "postgresql": + if field_cls == Dict and not field_obj.pickled: + sa_type_cls = psql.JSON + +@@ -228,35 +238,10 @@ class DeclarativeMeta(sa_dec.DeclarativeMeta, ABCMeta): + type_kwargs["length"] = field_obj.max_length + + # Update the attributes of the class +- # SQLAlchemy changed where `DeclarativeMeta.__init__` looks for class attributes +- # between versions 1.3 and 1.4, and then fixated on `cls.__dict__` in 1.4.36. +- # While the `setattr` below works with the latest SQLAlchemy version of 1.4.36, +- # it's better to populate both `dict_` as well as `cls.__dict__` to be compatible +- # with all 1.4.10+ versions of SQLAlchemy. 
+- # More info: +- # * https://github.com/sqlalchemy/sqlalchemy/issues/6791 +- # * https://github.com/sqlalchemy/sqlalchemy/issues/7900 + column = Column(sa_type_cls(*type_args, **type_kwargs), **col_args) +- setattr(cls, attribute_name, column) # Set class attribute +- dict_[attribute_name] = column # Set in dict_ ++ setattr(subclass, attribute_name, column) # Set class attribute + +- super().__init__(classname, bases, dict_) +- +- +-def derive_schema_name(model_cls): +- # Retain schema name if already present, otherwise derive from entity class +- if ( +- hasattr(model_cls.meta_, "schema_name") +- and model_cls.meta_.schema_name is not None +- ): +- return model_cls.meta_.schema_name +- else: +- return model_cls.meta_.entity_cls.meta_.schema_name +- +- +-@as_declarative(metaclass=DeclarativeMeta) +-class SqlalchemyModel(BaseModel): +- """Model representation for the Sqlalchemy Database""" ++ super().__init_subclass__(**kwargs) + + @declared_attr + def __tablename__(cls): +@@ -578,9 +563,9 @@ class SAProvider(BaseProvider): + self.conn_info["SCHEMA"] if "SCHEMA" in self.conn_info else "public" + ) + +- self._metadata = MetaData(bind=self._engine, schema=schema) ++ self._metadata = MetaData(schema=schema) + else: +- self._metadata = MetaData(bind=self._engine) ++ self._metadata = MetaData() + + # A temporary cache of already constructed model classes + self._model_classes = {} +@@ -655,13 +640,13 @@ class SAProvider(BaseProvider): + transaction = conn.begin() + + if self.conn_info["database"] == self.databases.sqlite.value: +- conn.execute("PRAGMA foreign_keys = OFF;") ++ conn.execute(text("PRAGMA foreign_keys = OFF;")) + + for table in self._metadata.sorted_tables: + conn.execute(table.delete()) + + if self.conn_info["database"] == self.databases.sqlite.value: +- conn.execute("PRAGMA foreign_keys = ON;") ++ conn.execute(text("PRAGMA foreign_keys = ON;")) + + transaction.commit() + +@@ -673,10 +658,10 @@ class SAProvider(BaseProvider): + for _, aggregate_record in self.domain.registry.aggregates.items(): + self.domain.repository_for(aggregate_record.cls)._dao + +- self._metadata.create_all() ++ self._metadata.create_all(self._engine) + + def _drop_database_artifacts(self): +- self._metadata.drop_all() ++ self._metadata.drop_all(self._engine) + self._metadata.clear() + + def decorate_model_class(self, entity_cls, model_cls): +@@ -712,15 +697,15 @@ class SAProvider(BaseProvider): + # Add the earlier copied columns to the custom attributes + custom_attrs = {**custom_attrs, **columns} + +- from protean.core.model import ModelMeta +- +- meta_ = ModelMeta(model_cls.meta_) ++ meta_ = Options(model_cls.meta_) + meta_.entity_cls = entity_cls + meta_.schema_name = ( + schema_name if meta_.schema_name is None else meta_.schema_name + ) + +- custom_attrs.update({"meta_": meta_, "metadata": self._metadata}) ++ custom_attrs.update( ++ {"meta_": meta_, "engine": self._engine, "metadata": self._metadata} ++ ) + # FIXME Ensure the custom model attributes are constructed properly + decorated_model_cls = type( + model_cls.__name__, (SqlalchemyModel, model_cls), custom_attrs +@@ -739,10 +724,8 @@ class SAProvider(BaseProvider): + if entity_cls.meta_.schema_name in self._model_classes: + model_cls = self._model_classes[entity_cls.meta_.schema_name] + else: +- from protean.core.model import ModelMeta +- + # Construct a new Meta object with existing values +- meta_ = ModelMeta() ++ meta_ = Options() + meta_.entity_cls = entity_cls + # If schema_name is not provided, sqlalchemy can throw + # 
sqlalchemy.exc.InvalidRequestError: Class does not +@@ -752,6 +735,7 @@ class SAProvider(BaseProvider): + + attrs = { + "meta_": meta_, ++ "engine": self._engine, + "metadata": self._metadata, + } + # FIXME Ensure the custom model attributes are constructed properly +diff --git a/src/protean/container.py b/src/protean/container.py +index 920d0cb..4353c34 100644 +--- a/src/protean/container.py ++++ b/src/protean/container.py +@@ -8,7 +8,6 @@ from collections import defaultdict + from typing import Any, Type, Union + + from protean.exceptions import ( +- IncorrectUsageError, + InvalidDataError, + NotSupportedError, + ValidationError, +@@ -91,7 +90,8 @@ class OptionsMixin: + """ + super().__init_subclass__() + +- # Retrieve inner Meta class ++ final_options = Options() ++ + # Gather `Meta` class/object if defined + options = getattr(subclass, "Meta", None) + +@@ -103,9 +103,13 @@ class OptionsMixin: + # `__qualname__` contains the Inner class name in the form of a dot notation: + # .. + if options and options.__qualname__.split(".")[-2] == subclass.__name__: +- subclass.meta_ = Options(options) +- else: +- subclass.meta_ = Options() ++ final_options = final_options + Options(options) ++ ++ # meta_object_options = getattr(subclass, "meta_", None) ++ # if meta_object_options: ++ # final_options = final_options + meta_object_options ++ ++ subclass.meta_ = final_options + + # Assign default options for remaining items + subclass._set_defaults() +@@ -376,23 +380,10 @@ class IdentityMixin: + ) + + setattr(new_class, _ID_FIELD_NAME, id_field.field_name) +- +- # If the aggregate/entity has been marked abstract, +- # and contains an identifier field, raise exception +- if new_class.meta_.abstract and id_field: +- raise IncorrectUsageError( +- { +- "_entity": [ +- f"Abstract Aggregate `{new_class.__name__}` marked as abstract cannot have" +- " identity fields" +- ] +- } +- ) + except StopIteration: + # If no id field is declared then create one +- # If the aggregate/entity is marked abstract, +- # avoid creating an identifier field. +- if not new_class.meta_.abstract: ++ # If the `auto_add_id_field` is False, avoid creating an identifier field. 
++ if new_class.meta_.auto_add_id_field: + new_class.__create_id_field() + + @classmethod +diff --git a/src/protean/core/aggregate.py b/src/protean/core/aggregate.py +index c1f8a2c..41c64cb 100644 +--- a/src/protean/core/aggregate.py ++++ b/src/protean/core/aggregate.py +@@ -45,9 +45,6 @@ class BaseAggregate(EventedMixin, BaseEntity): + # Track current version of Aggregate + _version = Integer(default=-1) + +- class Meta: +- abstract = True +- + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + +@@ -58,6 +55,7 @@ class BaseAggregate(EventedMixin, BaseEntity): + @classmethod + def _default_options(cls): + return [ ++ ("auto_add_id_field", True), + ("provider", "default"), + ("model", None), + ("stream_name", inflection.underscore(cls.__name__)), +diff --git a/src/protean/core/application_service.py b/src/protean/core/application_service.py +index 130ade2..56d46e0 100644 +--- a/src/protean/core/application_service.py ++++ b/src/protean/core/application_service.py +@@ -20,9 +20,6 @@ class BaseApplicationService(Element, OptionsMixin): + + element_type = DomainObjects.APPLICATION_SERVICE + +- class Meta: +- abstract = True +- + def __new__(cls, *args, **kwargs): + if cls is BaseApplicationService: + raise TypeError("BaseApplicationService cannot be instantiated") +diff --git a/src/protean/core/command.py b/src/protean/core/command.py +index f95440c..4976098 100644 +--- a/src/protean/core/command.py ++++ b/src/protean/core/command.py +@@ -1,5 +1,5 @@ + from protean.container import BaseContainer, OptionsMixin +-from protean.exceptions import IncorrectUsageError, InvalidDataError, ValidationError ++from protean.exceptions import IncorrectUsageError, InvalidDataError, ValidationError, NotSupportedError + from protean.fields import Field + from protean.reflection import _ID_FIELD_NAME, declared_fields + from protean.utils import DomainObjects, derive_element_class +@@ -14,8 +14,10 @@ class BaseCommand(BaseContainer, OptionsMixin): + + element_type = DomainObjects.COMMAND + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseCommand: ++ raise NotSupportedError("BaseCommand cannot be instantiated") ++ return super().__new__(cls) + + def __init_subclass__(subclass) -> None: + super().__init_subclass__() +diff --git a/src/protean/core/domain_service.py b/src/protean/core/domain_service.py +index d6184a7..c04cbaf 100644 +--- a/src/protean/core/domain_service.py ++++ b/src/protean/core/domain_service.py +@@ -22,9 +22,6 @@ class BaseDomainService(Element, OptionsMixin): + + element_type = DomainObjects.DOMAIN_SERVICE + +- class Meta: +- abstract = True +- + def __new__(cls, *args, **kwargs): + if cls is BaseDomainService: + raise TypeError("BaseDomainService cannot be instantiated") +diff --git a/src/protean/core/entity.py b/src/protean/core/entity.py +index 98214cc..5181847 100644 +--- a/src/protean/core/entity.py ++++ b/src/protean/core/entity.py +@@ -110,8 +110,10 @@ class BaseEntity(IdentityMixin, OptionsMixin, BaseContainer): + + element_type = DomainObjects.ENTITY + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseEntity: ++ raise TypeError("BaseEntity cannot be instantiated") ++ return super().__new__(cls) + + def __init_subclass__(subclass) -> None: + super().__init_subclass__() +@@ -122,6 +124,7 @@ class BaseEntity(IdentityMixin, OptionsMixin, BaseContainer): + @classmethod + def _default_options(cls): + return [ ++ ("auto_add_id_field", True), + ("provider", "default"), + ("model", None), + 
("part_of", None), +diff --git a/src/protean/core/event.py b/src/protean/core/event.py +index b1116b2..d23bd08 100644 +--- a/src/protean/core/event.py ++++ b/src/protean/core/event.py +@@ -18,8 +18,10 @@ class BaseEvent(BaseContainer, OptionsMixin): # FIXME Remove OptionsMixin + + element_type = DomainObjects.EVENT + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseEvent: ++ raise TypeError("BaseEvent cannot be instantiated") ++ return super().__new__(cls) + + def __init_subclass__(subclass) -> None: + super().__init_subclass__() +diff --git a/src/protean/core/event_handler.py b/src/protean/core/event_handler.py +index f720bba..169b881 100644 +--- a/src/protean/core/event_handler.py ++++ b/src/protean/core/event_handler.py +@@ -15,8 +15,10 @@ class BaseEventHandler(Element, HandlerMixin, OptionsMixin): + + element_type = DomainObjects.EVENT_HANDLER + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseEventHandler: ++ raise TypeError("BaseEventHandler cannot be instantiated") ++ return super().__new__(cls) + + @classmethod + def _default_options(cls): +@@ -30,11 +32,6 @@ class BaseEventHandler(Element, HandlerMixin, OptionsMixin): + ("source_stream", None), + ] + +- def __new__(cls, *args, **kwargs): +- if cls is BaseEventHandler: +- raise TypeError("BaseEventHandler cannot be instantiated") +- return super().__new__(cls) +- + + def event_handler_factory(element_cls, **opts): + element_cls = derive_element_class(element_cls, BaseEventHandler, **opts) +diff --git a/src/protean/core/event_sourced_aggregate.py b/src/protean/core/event_sourced_aggregate.py +index dcbee2e..1294552 100644 +--- a/src/protean/core/event_sourced_aggregate.py ++++ b/src/protean/core/event_sourced_aggregate.py +@@ -34,12 +34,16 @@ class BaseEventSourcedAggregate( + # Track current version of Aggregate + _version = Integer(default=-1) + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseEventSourcedAggregate: ++ raise TypeError("BaseEventSourcedAggregate cannot be instantiated") ++ return super().__new__(cls) ++ + + @classmethod + def _default_options(cls): + return [ ++ ("auto_add_id_field", True), + ("stream_name", inflection.underscore(cls.__name__)), + ] + +diff --git a/src/protean/core/model.py b/src/protean/core/model.py +index b03232b..ca467c2 100644 +--- a/src/protean/core/model.py ++++ b/src/protean/core/model.py +@@ -1,29 +1,11 @@ + from abc import abstractmethod + +-from protean.container import Element ++from protean.container import Element, OptionsMixin + from protean.exceptions import IncorrectUsageError +-from protean.utils import DomainObjects ++from protean.utils import DomainObjects, derive_element_class + + +-class ModelMeta: +- """Metadata info for the Model. +- +- Options: +- - ``entity_cls``: The Entity that this model is associated with +- """ +- +- def __init__(self, meta=None): +- if meta: +- self.entity_cls = getattr(meta, "entity_cls", None) +- self.schema_name = getattr(meta, "schema_name", None) +- self.database = getattr(meta, "database", None) +- else: +- self.entity_cls = None +- self.schema_name = None +- self.database = None +- +- +-class BaseModel(Element): ++class BaseModel(Element, OptionsMixin): + """This is a Model representing a data schema in the persistence store. A concrete implementation of this + model has to be provided by each persistence store plugin. 
+ """ +@@ -35,6 +17,14 @@ class BaseModel(Element): + raise TypeError("BaseModel cannot be instantiated") + return super().__new__(cls) + ++ @classmethod ++ def _default_options(cls): ++ return [ ++ ("entity_cls", None), ++ ("schema_name", None), ++ ("database", None), ++ ] ++ + @classmethod + @abstractmethod + def from_entity(cls, entity): +@@ -47,23 +37,7 @@ class BaseModel(Element): + + + def model_factory(element_cls, **kwargs): +- element_cls.element_type = DomainObjects.MODEL +- +- if hasattr(element_cls, "Meta"): +- element_cls.meta_ = ModelMeta(element_cls.Meta) +- else: +- element_cls.meta_ = ModelMeta() +- +- if not (hasattr(element_cls.meta_, "entity_cls") and element_cls.meta_.entity_cls): +- element_cls.meta_.entity_cls = kwargs.pop("entity_cls", None) +- +- if not ( +- hasattr(element_cls.meta_, "schema_name") and element_cls.meta_.schema_name +- ): +- element_cls.meta_.schema_name = kwargs.pop("schema_name", None) +- +- if not (hasattr(element_cls.meta_, "database") and element_cls.meta_.database): +- element_cls.meta_.database = kwargs.pop("database", None) ++ element_cls = derive_element_class(element_cls, BaseModel, **kwargs) + + if not element_cls.meta_.entity_cls: + raise IncorrectUsageError( +diff --git a/src/protean/core/subscriber.py b/src/protean/core/subscriber.py +index eb5fb1f..51bb11e 100644 +--- a/src/protean/core/subscriber.py ++++ b/src/protean/core/subscriber.py +@@ -20,15 +20,15 @@ class BaseSubscriber(Element, OptionsMixin): + + element_type = DomainObjects.SUBSCRIBER + +- @classmethod +- def _default_options(cls): +- return [("event", None), ("broker", "default")] +- + def __new__(cls, *args, **kwargs): + if cls is BaseSubscriber: + raise TypeError("BaseSubscriber cannot be instantiated") + return super().__new__(cls) + ++ @classmethod ++ def _default_options(cls): ++ return [("event", None), ("broker", "default")] ++ + @abstractmethod + def __call__(self, event: BaseEvent) -> Optional[Any]: + """Placeholder method for receiving notifications on event""" +diff --git a/src/protean/core/value_object.py b/src/protean/core/value_object.py +index 54e37de..d934080 100644 +--- a/src/protean/core/value_object.py ++++ b/src/protean/core/value_object.py +@@ -17,8 +17,11 @@ logger = logging.getLogger(__name__) + class BaseValueObject(BaseContainer, OptionsMixin): + element_type = DomainObjects.VALUE_OBJECT + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseValueObject: ++ raise TypeError("BaseValueObject cannot be instantiated") ++ return super().__new__(cls) ++ + + def __init_subclass__(subclass) -> None: + super().__init_subclass__() +diff --git a/src/protean/core/view.py b/src/protean/core/view.py +index 74beef3..347e053 100644 +--- a/src/protean/core/view.py ++++ b/src/protean/core/view.py +@@ -16,8 +16,10 @@ logger = logging.getLogger(__name__) + class BaseView(BaseContainer, OptionsMixin): + element_type = DomainObjects.VIEW + +- class Meta: +- abstract = True ++ def __new__(cls, *args, **kwargs): ++ if cls is BaseView: ++ raise TypeError("BaseView cannot be instantiated") ++ return super().__new__(cls) + + @classmethod + def _default_options(cls): +diff --git a/src/protean/domain/__init__.py b/src/protean/domain/__init__.py +index 1083f03..3997601 100644 +--- a/src/protean/domain/__init__.py ++++ b/src/protean/domain/__init__.py +@@ -29,6 +29,7 @@ from protean.utils import ( + DomainObjects, + EventProcessing, + fqn, ++ factory_for + ) + + from .config import Config2, ConfigAttribute +@@ -371,50 +372,6 @@ class Domain: 
+ def registry(self): + return self._domain_registry + +- def factory_for(self, domain_object_type): +- from protean.core.aggregate import aggregate_factory +- from protean.core.application_service import application_service_factory +- from protean.core.command import command_factory +- from protean.core.command_handler import command_handler_factory +- from protean.core.domain_service import domain_service_factory +- from protean.core.email import email_factory +- from protean.core.entity import entity_factory +- from protean.core.event import domain_event_factory +- from protean.core.event_handler import event_handler_factory +- from protean.core.event_sourced_aggregate import event_sourced_aggregate_factory +- from protean.core.model import model_factory +- from protean.core.repository import repository_factory +- from protean.core.serializer import serializer_factory +- from protean.core.subscriber import subscriber_factory +- from protean.core.value_object import value_object_factory +- from protean.core.view import view_factory +- +- factories = { +- DomainObjects.AGGREGATE.value: aggregate_factory, +- DomainObjects.APPLICATION_SERVICE.value: application_service_factory, +- DomainObjects.COMMAND.value: command_factory, +- DomainObjects.COMMAND_HANDLER.value: command_handler_factory, +- DomainObjects.EVENT.value: domain_event_factory, +- DomainObjects.EVENT_HANDLER.value: event_handler_factory, +- DomainObjects.EVENT_SOURCED_AGGREGATE.value: event_sourced_aggregate_factory, +- DomainObjects.DOMAIN_SERVICE.value: domain_service_factory, +- DomainObjects.EMAIL.value: email_factory, +- DomainObjects.ENTITY.value: entity_factory, +- DomainObjects.MODEL.value: model_factory, +- DomainObjects.REPOSITORY.value: repository_factory, +- DomainObjects.SUBSCRIBER.value: subscriber_factory, +- DomainObjects.SERIALIZER.value: serializer_factory, +- DomainObjects.VALUE_OBJECT.value: value_object_factory, +- DomainObjects.VIEW.value: view_factory, +- } +- +- if domain_object_type.value not in factories: +- raise IncorrectUsageError( +- {"_entity": [f"Unknown Element Type `{domain_object_type.value}`"]} +- ) +- +- return factories[domain_object_type.value] +- + def _register_element(self, element_type, element_cls, **kwargs): # noqa: C901 + """Register class into the domain""" + # Check if `element_cls` is already a subclass of the Element Type +@@ -428,7 +385,7 @@ class Domain: + # class Account: + # ``` + +- factory = self.factory_for(element_type) ++ factory = factory_for(element_type) + new_cls = factory(element_cls, **kwargs) + + if element_type == DomainObjects.MODEL: +diff --git a/src/protean/fields/basic.py b/src/protean/fields/basic.py +index 8c0823f..cc8f2f0 100644 +--- a/src/protean/fields/basic.py ++++ b/src/protean/fields/basic.py +@@ -374,14 +374,8 @@ class Identifier(Field): + ]: + raise ValidationError({"identity_type": ["Identity type not supported"]}) + +- # Pick identity type from domain configuration if not provided +- try: +- if not identity_type: +- identity_type = current_domain.config["identity_type"] +- except OutOfContextError: # Domain not active +- identity_type = IdentityType.STRING.value +- + self.identity_type = identity_type ++ + super().__init__(**kwargs) + + def _cast_to_type(self, value): +@@ -390,6 +384,12 @@ class Identifier(Field): + if not (isinstance(value, (UUID, str, int))) or isinstance(value, bool): + self.fail("invalid", value=value) + ++ # Fixate on IdentityType if not done already ++ # This happens the first time an identifier field instance is used. 
++ # We don't try to fix this in the constructor because the Domain may not be available at that time. ++ if self.identity_type is None: ++ self.identity_type = current_domain.config["identity_type"] ++ + # Ensure that the value is of the right type + if self.identity_type == IdentityType.UUID.value: + if not isinstance(value, UUID): +diff --git a/src/protean/globals.py b/src/protean/globals.py +index 605ad0f..1c769b9 100644 +--- a/src/protean/globals.py ++++ b/src/protean/globals.py +@@ -1,5 +1,7 @@ + from __future__ import annotations + ++import warnings ++ + from typing import TYPE_CHECKING, Any + + if TYPE_CHECKING: +@@ -9,8 +11,6 @@ from functools import partial + + from werkzeug.local import LocalProxy, LocalStack + +-from protean.exceptions import OutOfContextError +- + _domain_ctx_err_msg = """\ + Working outside of domain context. + This typically means that you attempted to use functionality that needed +@@ -23,14 +23,22 @@ documentation for more information.\ + def _lookup_domain_object(name) -> Any: + top = _domain_context_stack.top + if top is None: +- raise OutOfContextError(_domain_ctx_err_msg) ++ warnings.warn( ++ _domain_ctx_err_msg, ++ stacklevel=3, ++ ) ++ return None + return getattr(top, name) + + + def _find_domain() -> Domain: + top = _domain_context_stack.top + if top is None: +- raise OutOfContextError(_domain_ctx_err_msg) ++ warnings.warn( ++ _domain_ctx_err_msg, ++ stacklevel=3, ++ ) ++ return None + return top.domain + + +diff --git a/src/protean/template/domain_template/.gitignore b/src/protean/template/domain_template/.gitignore +index efa407c..f3419ce 100644 +--- a/src/protean/template/domain_template/.gitignore ++++ b/src/protean/template/domain_template/.gitignore +@@ -159,4 +159,7 @@ cython_debug/ + # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore + # and can be added to the global gitignore or merged into this file. For a more nuclear + # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+-#.idea/ +\ No newline at end of file ++#.idea/ ++ ++# Mac OS ++.DS_Store +\ No newline at end of file +diff --git a/src/protean/template/domain_template/src/{{package_name}}/domain.py.jinja b/src/protean/template/domain_template/src/{{package_name}}/domain.py.jinja +index 3b3d48d..a9baa1e 100644 +--- a/src/protean/template/domain_template/src/{{package_name}}/domain.py.jinja ++++ b/src/protean/template/domain_template/src/{{package_name}}/domain.py.jinja +@@ -1,7 +1,7 @@ + from protean.domain import Domain + + # Domain Composition Root +-{{ package_name }} = Domain(__file__, "{{ project_name }}") ++{{ package_name }} = Domain(__file__, "{{ package_name | capitalize }}") + + # Initialize and load all domain elements under the composition root + {{ package_name }}.init() +diff --git a/src/protean/template/domain_template/src/{{package_name}}/domain.toml.jinja b/src/protean/template/domain_template/src/{{package_name}}/domain.toml.jinja +index 583b220..f8e7e14 100644 +--- a/src/protean/template/domain_template/src/{{package_name}}/domain.toml.jinja ++++ b/src/protean/template/domain_template/src/{{package_name}}/domain.toml.jinja +@@ -1,6 +1,6 @@ + debug = true + testing = true +-secret_key = ${SECRET_KEY|{{ 999999999999999999999999999999999|ans_random|hash('sha256') }}} ++secret_key = "${SECRET_KEY|{{ 999999999999999999999999999999999|ans_random|hash('sha256') }}}" + identity_strategy = "uuid" + identity_type = "string" + event_processing = "sync" +@@ -12,11 +12,6 @@ provider = "memory" + [databases.memory] + provider = "memory" + +-[databases.sqlite] +-provider = "sqlalchemy" +-database = "sqlite" +-database_uri = "sqlite:///test.db" +- + [brokers.default] + provider = "inline" + +diff --git a/src/protean/utils/__init__.py b/src/protean/utils/__init__.py +index de21635..e94b2b1 100644 +--- a/src/protean/utils/__init__.py ++++ b/src/protean/utils/__init__.py +@@ -12,7 +12,7 @@ from datetime import UTC, datetime + from enum import Enum + from uuid import uuid4 + +-from protean.exceptions import ConfigurationError ++from protean.exceptions import ConfigurationError, IncorrectUsageError + from protean.globals import current_domain + + logger = logging.getLogger(__name__) +@@ -132,6 +132,57 @@ class DomainObjects(Enum): + VIEW = "VIEW" + + ++def base_classes()->list: ++ return [ ++ ++ ] ++ ++ ++def factory_for(domain_object_type) -> callable: ++ from protean.core.aggregate import aggregate_factory ++ from protean.core.application_service import application_service_factory ++ from protean.core.command import command_factory ++ from protean.core.command_handler import command_handler_factory ++ from protean.core.domain_service import domain_service_factory ++ from protean.core.email import email_factory ++ from protean.core.entity import entity_factory ++ from protean.core.event import domain_event_factory ++ from protean.core.event_handler import event_handler_factory ++ from protean.core.event_sourced_aggregate import event_sourced_aggregate_factory ++ from protean.core.model import model_factory ++ from protean.core.repository import repository_factory ++ from protean.core.serializer import serializer_factory ++ from protean.core.subscriber import subscriber_factory ++ from protean.core.value_object import value_object_factory ++ from protean.core.view import view_factory ++ ++ factories = { ++ DomainObjects.AGGREGATE.value: aggregate_factory, ++ DomainObjects.APPLICATION_SERVICE.value: application_service_factory, ++ DomainObjects.COMMAND.value: command_factory, ++ 
DomainObjects.COMMAND_HANDLER.value: command_handler_factory, ++ DomainObjects.EVENT.value: domain_event_factory, ++ DomainObjects.EVENT_HANDLER.value: event_handler_factory, ++ DomainObjects.EVENT_SOURCED_AGGREGATE.value: event_sourced_aggregate_factory, ++ DomainObjects.DOMAIN_SERVICE.value: domain_service_factory, ++ DomainObjects.EMAIL.value: email_factory, ++ DomainObjects.ENTITY.value: entity_factory, ++ DomainObjects.MODEL.value: model_factory, ++ DomainObjects.REPOSITORY.value: repository_factory, ++ DomainObjects.SUBSCRIBER.value: subscriber_factory, ++ DomainObjects.SERIALIZER.value: serializer_factory, ++ DomainObjects.VALUE_OBJECT.value: value_object_factory, ++ DomainObjects.VIEW.value: view_factory, ++ } ++ ++ if domain_object_type.value not in factories: ++ raise IncorrectUsageError( ++ {"_entity": [f"Unknown Element Type `{domain_object_type.value}`"]} ++ ) ++ ++ return factories[domain_object_type.value] ++ ++ + def derive_element_class(element_cls, base_cls, **opts): + if not issubclass(element_cls, base_cls): + try: +@@ -188,4 +239,5 @@ __all__ = [ + "import_from_full_path", + "singleton", + "utcnow_func", ++ "factory_for" + ] +diff --git a/tests/container/test_options.py b/tests/container/test_options.py +index 14a881a..7f81d8f 100644 +--- a/tests/container/test_options.py ++++ b/tests/container/test_options.py +@@ -1,5 +1,6 @@ +-from protean.container import Options + from protean import BaseAggregate ++from protean.container import Options ++from protean.fields import String + + + class Meta: +@@ -13,14 +14,12 @@ class TestOptionsStandalone: + assert opts is not None + assert opts.foo == "bar" + +- + def test_options_construction_from_dict(self): + opts = Options({"foo": "bar"}) + + assert opts is not None + assert opts.foo == "bar" + +- + def test_tracking_currently_active_attributes(self): + opts = Options({"foo": "bar"}) + assert opts._opts == {"abstract", "foo"} +@@ -34,7 +33,6 @@ class TestOptionsStandalone: + del opts.baz + assert opts._opts == {"abstract", "foo", "waldo"} + +- + def test_option_objects_equality(self): + assert Options() == Options() + assert Options(Meta) == Options({"foo": "bar"}) +@@ -52,7 +50,6 @@ class TestOptionsStandalone: + + assert Options(Meta) != Options(Meta3) + +- + def test_merging_two_option_objects(self): + opt1 = Options({"foo": "bar", "baz": "qux"}) + opt2 = Options({"baz": "quz"}) +@@ -67,8 +64,10 @@ class TestOptionsStandalone: + class TestOptionsInElements: + def test_options_from_meta_class_in_element(self): + class Foo(BaseAggregate): ++ bar = String() ++ + class Meta: +- foo = "bar" ++ foo = "baz" + +- assert Foo.meta_.foo == "bar" +- assert Foo.meta_.abstract is False +\ No newline at end of file ++ assert Foo.meta_.foo == "baz" ++ assert Foo.meta_.abstract is False +diff --git a/tests/test_aggregates.py b/tests/test_aggregates.py +index 0fd53fd..6240af7 100644 +--- a/tests/test_aggregates.py ++++ b/tests/test_aggregates.py +@@ -64,28 +64,10 @@ class TestAggregateIdentity: + + class Meta: + abstract = True ++ auto_add_id_field = False + + assert "id" not in declared_fields(TimeStamped) + +- def test_that_abstract_aggregates_cannot_have_a_declared_id_field( +- self, test_domain +- ): +- with pytest.raises(IncorrectUsageError) as exception: +- +- @test_domain.aggregate +- class User(BaseAggregate): +- email = String(identifier=True) +- name = String(max_length=55) +- +- class Meta: +- abstract = True +- +- assert exception.value.messages == { +- "_entity": [ +- "Abstract Aggregate `User` marked as abstract cannot have 
identity fields" +- ] +- } +- + + class TestAggregateMeta: + class TestAggregateMetaInClassDefinition: + + + + + + + +{{ package_name }} = Domain(__file__, "{{ package_name | capitalize }}") diff --git a/poetry.lock b/poetry.lock index 2bf2836e..b9fcb5c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2439,81 +2439,89 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.52" +version = "2.0.30" description = "Database Abstraction Library" optional = true -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "SQLAlchemy-1.4.52-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:f68016f9a5713684c1507cc37133c28035f29925c75c0df2f9d0f7571e23720a"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24bb0f81fbbb13d737b7f76d1821ec0b117ce8cbb8ee5e8641ad2de41aa916d3"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e93983cc0d2edae253b3f2141b0a3fb07e41c76cd79c2ad743fc27eb79c3f6db"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:84e10772cfc333eb08d0b7ef808cd76e4a9a30a725fb62a0495877a57ee41d81"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:427988398d2902de042093d17f2b9619a5ebc605bf6372f7d70e29bde6736842"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win32.whl", hash = "sha256:1296f2cdd6db09b98ceb3c93025f0da4835303b8ac46c15c2136e27ee4d18d94"}, - {file = "SQLAlchemy-1.4.52-cp310-cp310-win_amd64.whl", hash = "sha256:80e7f697bccc56ac6eac9e2df5c98b47de57e7006d2e46e1a3c17c546254f6ef"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2f251af4c75a675ea42766880ff430ac33291c8d0057acca79710f9e5a77383d"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb8f9e4c4718f111d7b530c4e6fb4d28f9f110eb82e7961412955b3875b66de0"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afb1672b57f58c0318ad2cff80b384e816735ffc7e848d8aa51e0b0fc2f4b7bb"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win32.whl", hash = "sha256:6e41cb5cda641f3754568d2ed8962f772a7f2b59403b95c60c89f3e0bd25f15e"}, - {file = "SQLAlchemy-1.4.52-cp311-cp311-win_amd64.whl", hash = "sha256:5bed4f8c3b69779de9d99eb03fd9ab67a850d74ab0243d1be9d4080e77b6af12"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:49e3772eb3380ac88d35495843daf3c03f094b713e66c7d017e322144a5c6b7c"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:618827c1a1c243d2540314c6e100aee7af09a709bd005bae971686fab6723554"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9acf369aaadb71a725b7e83a5ef40ca3de1cf4cdc93fa847df6b12d3cd924b"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win32.whl", hash = "sha256:763bd97c4ebc74136ecf3526b34808c58945023a59927b416acebcd68d1fc126"}, - {file = "SQLAlchemy-1.4.52-cp312-cp312-win_amd64.whl", hash = "sha256:f12aaf94f4d9679ca475975578739e12cc5b461172e04d66f7a3c39dd14ffc64"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-macosx_10_14_x86_64.whl", hash = 
"sha256:853fcfd1f54224ea7aabcf34b227d2b64a08cbac116ecf376907968b29b8e763"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f98dbb8fcc6d1c03ae8ec735d3c62110949a3b8bc6e215053aa27096857afb45"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e135fff2e84103bc15c07edd8569612ce317d64bdb391f49ce57124a73f45c5"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b5de6af8852500d01398f5047d62ca3431d1e29a331d0b56c3e14cb03f8094c"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3491c85df263a5c2157c594f54a1a9c72265b75d3777e61ee13c556d9e43ffc9"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win32.whl", hash = "sha256:427c282dd0deba1f07bcbf499cbcc9fe9a626743f5d4989bfdfd3ed3513003dd"}, - {file = "SQLAlchemy-1.4.52-cp36-cp36m-win_amd64.whl", hash = "sha256:ca5ce82b11731492204cff8845c5e8ca1a4bd1ade85e3b8fcf86e7601bfc6a39"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:29d4247313abb2015f8979137fe65f4eaceead5247d39603cc4b4a610936cd2b"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a752bff4796bf22803d052d4841ebc3c55c26fb65551f2c96e90ac7c62be763a"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7ea11727feb2861deaa293c7971a4df57ef1c90e42cb53f0da40c3468388000"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d913f8953e098ca931ad7f58797f91deed26b435ec3756478b75c608aa80d139"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a251146b921725547ea1735b060a11e1be705017b568c9f8067ca61e6ef85f20"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win32.whl", hash = "sha256:1f8e1c6a6b7f8e9407ad9afc0ea41c1f65225ce505b79bc0342159de9c890782"}, - {file = "SQLAlchemy-1.4.52-cp37-cp37m-win_amd64.whl", hash = "sha256:346ed50cb2c30f5d7a03d888e25744154ceac6f0e6e1ab3bc7b5b77138d37710"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4dae6001457d4497736e3bc422165f107ecdd70b0d651fab7f731276e8b9e12d"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d2e08d79f5bf250afb4a61426b41026e448da446b55e4770c2afdc1e200fce"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbce5dd7c7735e01d24f5a60177f3e589078f83c8a29e124a6521b76d825b85"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bdb7b4d889631a3b2a81a3347c4c3f031812eb4adeaa3ee4e6b0d028ad1852b5"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c294ae4e6bbd060dd79e2bd5bba8b6274d08ffd65b58d106394cb6abbf35cf45"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win32.whl", hash = 
"sha256:bcdfb4b47fe04967669874fb1ce782a006756fdbebe7263f6a000e1db969120e"}, - {file = "SQLAlchemy-1.4.52-cp38-cp38-win_amd64.whl", hash = "sha256:7d0dbc56cb6af5088f3658982d3d8c1d6a82691f31f7b0da682c7b98fa914e91"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a551d5f3dc63f096ed41775ceec72fdf91462bb95abdc179010dc95a93957800"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux1_x86_64.manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_5_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ab773f9ad848118df7a9bbabca53e3f1002387cdbb6ee81693db808b82aaab0"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2de46f5d5396d5331127cfa71f837cca945f9a2b04f7cb5a01949cf676db7d1"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7027be7930a90d18a386b25ee8af30514c61f3852c7268899f23fdfbd3107181"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99224d621affbb3c1a4f72b631f8393045f4ce647dd3262f12fe3576918f8bf3"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win32.whl", hash = "sha256:c124912fd4e1bb9d1e7dc193ed482a9f812769cb1e69363ab68e01801e859821"}, - {file = "SQLAlchemy-1.4.52-cp39-cp39-win_amd64.whl", hash = "sha256:2c286fab42e49db23c46ab02479f328b8bdb837d3e281cae546cc4085c83b680"}, - {file = "SQLAlchemy-1.4.52.tar.gz", hash = "sha256:80e63bbdc5217dad3485059bdf6f65a7d43f33c8bde619df5c220edf03d87296"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b48154678e76445c7ded1896715ce05319f74b1e73cf82d4f8b59b46e9c0ddc"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2753743c2afd061bb95a61a51bbb6a1a11ac1c44292fad898f10c9839a7f75b2"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7bfc726d167f425d4c16269a9a10fe8630ff6d14b683d588044dcef2d0f6be7"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f61ada6979223013d9ab83a3ed003ded6959eae37d0d685db2c147e9143797"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a365eda439b7a00732638f11072907c1bc8e351c7665e7e5da91b169af794af"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bba002a9447b291548e8d66fd8c96a6a7ed4f2def0bb155f4f0a1309fd2735d5"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-win32.whl", hash = "sha256:0138c5c16be3600923fa2169532205d18891b28afa817cb49b50e08f62198bb8"}, + {file = "SQLAlchemy-2.0.30-cp310-cp310-win_amd64.whl", hash = "sha256:99650e9f4cf3ad0d409fed3eec4f071fadd032e9a5edc7270cd646a26446feeb"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:955991a09f0992c68a499791a753523f50f71a6885531568404fa0f231832aa0"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f69e4c756ee2686767eb80f94c0125c8b0a0b87ede03eacc5c8ae3b54b99dc46"}, + {file = 
"SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69c9db1ce00e59e8dd09d7bae852a9add716efdc070a3e2068377e6ff0d6fdaa"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1429a4b0f709f19ff3b0cf13675b2b9bfa8a7e79990003207a011c0db880a13"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:efedba7e13aa9a6c8407c48facfdfa108a5a4128e35f4c68f20c3407e4376aa9"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16863e2b132b761891d6c49f0a0f70030e0bcac4fd208117f6b7e053e68668d0"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-win32.whl", hash = "sha256:2ecabd9ccaa6e914e3dbb2aa46b76dede7eadc8cbf1b8083c94d936bcd5ffb49"}, + {file = "SQLAlchemy-2.0.30-cp311-cp311-win_amd64.whl", hash = "sha256:0b3f4c438e37d22b83e640f825ef0f37b95db9aa2d68203f2c9549375d0b2260"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5a79d65395ac5e6b0c2890935bad892eabb911c4aa8e8015067ddb37eea3d56c"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9a5baf9267b752390252889f0c802ea13b52dfee5e369527da229189b8bd592e"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cb5a646930c5123f8461f6468901573f334c2c63c795b9af350063a736d0134"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:296230899df0b77dec4eb799bcea6fbe39a43707ce7bb166519c97b583cfcab3"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c62d401223f468eb4da32627bffc0c78ed516b03bb8a34a58be54d618b74d472"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3b69e934f0f2b677ec111b4d83f92dc1a3210a779f69bf905273192cf4ed433e"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-win32.whl", hash = "sha256:77d2edb1f54aff37e3318f611637171e8ec71472f1fdc7348b41dcb226f93d90"}, + {file = "SQLAlchemy-2.0.30-cp312-cp312-win_amd64.whl", hash = "sha256:b6c7ec2b1f4969fc19b65b7059ed00497e25f54069407a8701091beb69e591a5"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a8e3b0a7e09e94be7510d1661339d6b52daf202ed2f5b1f9f48ea34ee6f2d57"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b60203c63e8f984df92035610c5fb76d941254cf5d19751faab7d33b21e5ddc0"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1dc3eabd8c0232ee8387fbe03e0a62220a6f089e278b1f0aaf5e2d6210741ad"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:40ad017c672c00b9b663fcfcd5f0864a0a97828e2ee7ab0c140dc84058d194cf"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e42203d8d20dc704604862977b1470a122e4892791fe3ed165f041e4bf447a1b"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-win32.whl", hash = "sha256:2a4f4da89c74435f2bc61878cd08f3646b699e7d2eba97144030d1be44e27584"}, + {file = "SQLAlchemy-2.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:b6bf767d14b77f6a18b6982cbbf29d71bede087edae495d11ab358280f304d8e"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc0c53579650a891f9b83fa3cecd4e00218e071d0ba00c4890f5be0c34887ed3"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:311710f9a2ee235f1403537b10c7687214bb1f2b9ebb52702c5aa4a77f0b3af7"}, + {file = 
"SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:408f8b0e2c04677e9c93f40eef3ab22f550fecb3011b187f66a096395ff3d9fd"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37a4b4fb0dd4d2669070fb05b8b8824afd0af57587393015baee1cf9890242d9"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a943d297126c9230719c27fcbbeab57ecd5d15b0bd6bfd26e91bfcfe64220621"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a089e218654e740a41388893e090d2e2c22c29028c9d1353feb38638820bbeb"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-win32.whl", hash = "sha256:fa561138a64f949f3e889eb9ab8c58e1504ab351d6cf55259dc4c248eaa19da6"}, + {file = "SQLAlchemy-2.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:7d74336c65705b986d12a7e337ba27ab2b9d819993851b140efdf029248e818e"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae8c62fe2480dd61c532ccafdbce9b29dacc126fe8be0d9a927ca3e699b9491a"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2383146973a15435e4717f94c7509982770e3e54974c71f76500a0136f22810b"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8409de825f2c3b62ab15788635ccaec0c881c3f12a8af2b12ae4910a0a9aeef6"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0094c5dc698a5f78d3d1539853e8ecec02516b62b8223c970c86d44e7a80f6c7"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:edc16a50f5e1b7a06a2dcc1f2205b0b961074c123ed17ebda726f376a5ab0953"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f7703c2010355dd28f53deb644a05fc30f796bd8598b43f0ba678878780b6e4c"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-win32.whl", hash = "sha256:1f9a727312ff6ad5248a4367358e2cf7e625e98b1028b1d7ab7b806b7d757513"}, + {file = "SQLAlchemy-2.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:a0ef36b28534f2a5771191be6edb44cc2673c7b2edf6deac6562400288664221"}, + {file = "SQLAlchemy-2.0.30-py3-none-any.whl", hash = "sha256:7108d569d3990c71e26a42f60474b4c02c8586c4681af5fd67e51a044fdea86a"}, + {file = "SQLAlchemy-2.0.30.tar.gz", hash = "sha256:2b1708916730f4830bc69d6f49d37f7698b5bd7530aca7f04f785f8849e95255"}, +] + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] -mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +oracle = ["cx_oracle 
(>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql", "pymysql (<1)"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] [[package]] @@ -2845,4 +2853,4 @@ sqlite = ["sqlalchemy"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "99ce7df751d2d41e76af5eb81a1cbf9f2ef503675d980e71197603397c2a5031" +content-hash = "a0fc12002c0d53233c0bb562c7216da67c99e47b01703f236b306bf1bdb2ef63" diff --git a/pyproject.toml b/pyproject.toml index 33b23df8..29c1e7dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,7 +66,7 @@ werkzeug = ">=2.0.0" elasticsearch = {version = "~7.17.9", optional = true} elasticsearch-dsl = {version = "~7.4.1", optional = true} redis = {version = "~3.5.2", optional = true} -sqlalchemy = {version = "~1.4.50", optional = true} +sqlalchemy = {version = "~2.0.30", optional = true} psycopg2 = {version = ">=2.9.9", optional = true} celery = { version = "~5.2.7", extras = ["redis"], optional = true} flask = {version = ">=1.1.1", optional = true} diff --git a/src/protean/adapters/repository/sqlalchemy.py b/src/protean/adapters/repository/sqlalchemy.py index 867deb55..0a8b56a5 100644 --- a/src/protean/adapters/repository/sqlalchemy.py +++ b/src/protean/adapters/repository/sqlalchemy.py @@ -5,18 +5,15 @@ import logging import uuid -from abc import ABCMeta from enum import Enum from typing import Any import sqlalchemy.dialects.postgresql as psql -from sqlalchemy import Column, MetaData, and_, create_engine, or_, orm +from sqlalchemy import Column, MetaData, and_, create_engine, or_, orm, text from sqlalchemy import types as sa_types from sqlalchemy.engine.url import make_url from sqlalchemy.exc import DatabaseError -from sqlalchemy.ext import declarative as sa_dec -from sqlalchemy.ext.declarative import as_declarative, declared_attr from sqlalchemy.types import CHAR, TypeDecorator from protean.core.value_object import BaseValueObject @@ -134,10 +131,21 @@ def _custom_json_dumps(value): return json.dumps(value, default=_default) -class DeclarativeMeta(sa_dec.DeclarativeMeta, ABCMeta): - """Metaclass for the Sqlalchemy declarative schema""" +def derive_schema_name(model_cls): + # Retain schema name if already present, otherwise derive from entity class + if ( + hasattr(model_cls.meta_, "schema_name") + and model_cls.meta_.schema_name is not None + ): + return model_cls.meta_.schema_name + else: + return model_cls.meta_.entity_cls.meta_.schema_name - def __init__(cls, classname, bases, dict_): # noqa: C901 + +class SqlalchemyModel(orm.DeclarativeBase, BaseModel): + """Model representation for the Sqlalchemy Database""" + + def __init_subclass__(subclass, **kwargs): # noqa: C901 field_mapping = { Boolean: sa_types.Boolean, Date: sa_types.Date, @@ -166,13 +174,13 @@ def field_mapping_for(field_obj: Field): return field_mapping.get(field_cls) # Update the class attrs with the entity attributes - if "meta_" in dict_: - entity_cls = dict_["meta_"].entity_cls + if "meta_" in subclass.__dict__: + entity_cls = subclass.__dict__["meta_"].entity_cls for _, field_obj in attributes(entity_cls).items(): attribute_name = field_obj.attribute_name # Map the field if not in attributes - 
if attribute_name not in cls.__dict__: + if attribute_name not in subclass.__dict__: # Derive field based on field enclosed within ShadowField if isinstance(field_obj, _ShadowField): field_obj = field_obj.field_obj @@ -185,7 +193,7 @@ def field_mapping_for(field_obj: Field): sa_type_cls = field_mapping_for(field_obj) # Upgrade to Postgresql specific Data Types - if cls.metadata.bind.dialect.name == "postgresql": + if subclass.__dict__["engine"].dialect.name == "postgresql": if field_cls == Dict and not field_obj.pickled: sa_type_cls = psql.JSON @@ -228,37 +236,12 @@ def field_mapping_for(field_obj: Field): type_kwargs["length"] = field_obj.max_length # Update the attributes of the class - # SQLAlchemy changed where `DeclarativeMeta.__init__` looks for class attributes - # between versions 1.3 and 1.4, and then fixated on `cls.__dict__` in 1.4.36. - # While the `setattr` below works with the latest SQLAlchemy version of 1.4.36, - # it's better to populate both `dict_` as well as `cls.__dict__` to be compatible - # with all 1.4.10+ versions of SQLAlchemy. - # More info: - # * https://github.com/sqlalchemy/sqlalchemy/issues/6791 - # * https://github.com/sqlalchemy/sqlalchemy/issues/7900 column = Column(sa_type_cls(*type_args, **type_kwargs), **col_args) - setattr(cls, attribute_name, column) # Set class attribute - dict_[attribute_name] = column # Set in dict_ - - super().__init__(classname, bases, dict_) + setattr(subclass, attribute_name, column) # Set class attribute + super().__init_subclass__(**kwargs) -def derive_schema_name(model_cls): - # Retain schema name if already present, otherwise derive from entity class - if ( - hasattr(model_cls.meta_, "schema_name") - and model_cls.meta_.schema_name is not None - ): - return model_cls.meta_.schema_name - else: - return model_cls.meta_.entity_cls.meta_.schema_name - - -@as_declarative(metaclass=DeclarativeMeta) -class SqlalchemyModel(BaseModel): - """Model representation for the Sqlalchemy Database""" - - @declared_attr + @orm.declared_attr def __tablename__(cls): return derive_schema_name(cls) @@ -516,7 +499,7 @@ def _raw(self, query: Any, data: Any = None): conn = self._get_session() try: - results = conn.execute(query) + results = conn.execute(text(query)) entity_items = [] for item in results: @@ -578,9 +561,9 @@ def __init__(self, *args, **kwargs): self.conn_info["SCHEMA"] if "SCHEMA" in self.conn_info else "public" ) - self._metadata = MetaData(bind=self._engine, schema=schema) + self._metadata = MetaData(schema=schema) else: - self._metadata = MetaData(bind=self._engine) + self._metadata = MetaData() # A temporary cache of already constructed model classes self._model_classes = {} @@ -604,7 +587,7 @@ def _get_database_specific_session_args(self): Return: a dictionary with additional arguments and values. 
""" if self.conn_info["database"] == self.databases.postgresql.value: - return {"autocommit": True, "autoflush": False} + return {"autoflush": False} return {} @@ -632,7 +615,7 @@ def _execute_database_specific_connection_statements(self, conn): Return: None """ if self.conn_info["database"] == self.databases.sqlite.value: - conn.execute("PRAGMA case_sensitive_like = ON;") + conn.execute(text("PRAGMA case_sensitive_like = ON;")) return conn @@ -655,13 +638,13 @@ def _data_reset(self): transaction = conn.begin() if self.conn_info["database"] == self.databases.sqlite.value: - conn.execute("PRAGMA foreign_keys = OFF;") + conn.execute(text("PRAGMA foreign_keys = OFF;")) for table in self._metadata.sorted_tables: conn.execute(table.delete()) if self.conn_info["database"] == self.databases.sqlite.value: - conn.execute("PRAGMA foreign_keys = ON;") + conn.execute(text("PRAGMA foreign_keys = ON;")) transaction.commit() @@ -673,10 +656,10 @@ def _create_database_artifacts(self): for _, aggregate_record in self.domain.registry.aggregates.items(): self.domain.repository_for(aggregate_record.cls)._dao - self._metadata.create_all() + self._metadata.create_all(self._engine) def _drop_database_artifacts(self): - self._metadata.drop_all() + self._metadata.drop_all(self._engine) self._metadata.clear() def decorate_model_class(self, entity_cls, model_cls): @@ -720,7 +703,9 @@ def decorate_model_class(self, entity_cls, model_cls): schema_name if meta_.schema_name is None else meta_.schema_name ) - custom_attrs.update({"meta_": meta_, "metadata": self._metadata}) + custom_attrs.update( + {"meta_": meta_, "engine": self._engine, "metadata": self._metadata} + ) # FIXME Ensure the custom model attributes are constructed properly decorated_model_cls = type( model_cls.__name__, (SqlalchemyModel, model_cls), custom_attrs @@ -752,6 +737,7 @@ def construct_model_class(self, entity_cls): attrs = { "meta_": meta_, + "engine": self._engine, "metadata": self._metadata, } # FIXME Ensure the custom model attributes are constructed properly @@ -774,7 +760,7 @@ def raw(self, query: Any, data: Any = None): assert isinstance(query, str) assert isinstance(data, (dict, None)) - return self.get_connection().execute(query, data) + return self.get_connection().execute(text(query), data) operators = { diff --git a/tests/adapters/model/sqlalchemy_model/postgresql/conftest.py b/tests/adapters/model/sqlalchemy_model/postgresql/conftest.py index 06af8d0b..64c9d26b 100644 --- a/tests/adapters/model/sqlalchemy_model/postgresql/conftest.py +++ b/tests/adapters/model/sqlalchemy_model/postgresql/conftest.py @@ -53,9 +53,11 @@ def setup_db(): domain.repository_for(ListUser)._dao domain.repository_for(IntegerListUser)._dao - domain.providers["default"]._metadata.create_all() + default_provider = domain.providers["default"] + default_provider._metadata.create_all(default_provider._engine) yield # Drop all tables at the end of test suite - domain.providers["default"]._metadata.drop_all() + default_provider = domain.providers["default"] + default_provider._metadata.drop_all(default_provider._engine) diff --git a/tests/adapters/model/sqlalchemy_model/postgresql/test_model.py b/tests/adapters/model/sqlalchemy_model/postgresql/test_model.py index 70f963cd..9a4476fb 100644 --- a/tests/adapters/model/sqlalchemy_model/postgresql/test_model.py +++ b/tests/adapters/model/sqlalchemy_model/postgresql/test_model.py @@ -114,8 +114,8 @@ class ReceiverInlineModel: test_domain.repository_for(Receiver)._dao # Registers and refreshes DB objects - provider = 
-        provider._metadata.create_all()
+        default_provider = test_domain.providers["default"]
+        default_provider._metadata.create_all(default_provider._engine)
 
         model_cls = test_domain.repository_for(Receiver)._model
         assert model_cls.__name__ == "ReceiverInlineModel"
diff --git a/tests/adapters/model/sqlalchemy_model/sqlite/conftest.py b/tests/adapters/model/sqlalchemy_model/sqlite/conftest.py
index cf897a29..b280a806 100644
--- a/tests/adapters/model/sqlalchemy_model/sqlite/conftest.py
+++ b/tests/adapters/model/sqlalchemy_model/sqlite/conftest.py
@@ -37,9 +37,11 @@ def setup_db():
         domain.repository_for(Provider)._dao
         domain.repository_for(User)._dao
 
-        domain.providers["default"]._metadata.create_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.create_all(default_provider._engine)
 
         yield
 
         # Drop all tables at the end of test suite
-        domain.providers["default"]._metadata.drop_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.drop_all(default_provider._engine)
diff --git a/tests/adapters/model/sqlalchemy_model/sqlite/test_model.py b/tests/adapters/model/sqlalchemy_model/sqlite/test_model.py
index c6d72a7a..3328f6f2 100644
--- a/tests/adapters/model/sqlalchemy_model/sqlite/test_model.py
+++ b/tests/adapters/model/sqlalchemy_model/sqlite/test_model.py
@@ -114,7 +114,8 @@ class ReceiverInlineModel:
 
         test_domain.repository_for(Receiver)._dao
 
-        test_domain.providers["default"]._metadata.create_all()
+        default_provider = test_domain.providers["default"]
+        default_provider._metadata.create_all(default_provider._engine)
 
         model_cls = test_domain.repository_for(Receiver)._model
         assert model_cls.__name__ == "ReceiverInlineModel"
diff --git a/tests/adapters/repository/sqlalchemy_repo/postgresql/conftest.py b/tests/adapters/repository/sqlalchemy_repo/postgresql/conftest.py
index 1133cadb..19a31966 100644
--- a/tests/adapters/repository/sqlalchemy_repo/postgresql/conftest.py
+++ b/tests/adapters/repository/sqlalchemy_repo/postgresql/conftest.py
@@ -43,9 +43,11 @@ def setup_db():
         domain.repository_for(Customer)._dao
         domain.repository_for(Order)._dao
 
-        domain.providers["default"]._metadata.create_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.create_all(default_provider._engine)
 
         yield
 
         # Drop all tables at the end of test suite
-        domain.providers["default"]._metadata.drop_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.drop_all(default_provider._engine)
diff --git a/tests/adapters/repository/sqlalchemy_repo/sqlite/conftest.py b/tests/adapters/repository/sqlalchemy_repo/sqlite/conftest.py
index 8b11b67c..ddad6b28 100644
--- a/tests/adapters/repository/sqlalchemy_repo/sqlite/conftest.py
+++ b/tests/adapters/repository/sqlalchemy_repo/sqlite/conftest.py
@@ -28,9 +28,11 @@ def setup_db():
         domain.repository_for(User)._dao
         domain.repository_for(ComplexUser)._dao
 
-        domain.providers["default"]._metadata.create_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.create_all(default_provider._engine)
 
         yield
 
         # Drop all tables at the end of test suite
-        domain.providers["default"]._metadata.drop_all()
+        default_provider = domain.providers["default"]
+        default_provider._metadata.drop_all(default_provider._engine)