diff --git a/src/protean/core/entity.py b/src/protean/core/entity.py index 71e4ad7e..a7385c22 100644 --- a/src/protean/core/entity.py +++ b/src/protean/core/entity.py @@ -71,7 +71,7 @@ def query(cls): when invoked like `Dog.query.all()` directly. A new query, and a corresponding `Pagination` result would be created every time. """ - return QuerySet(cls.__name__) + return QuerySet(cls) def _load_base_class_fields(new_class, bases, attrs): """If this class is subclassing another Entity, add that Entity's @@ -165,711 +165,711 @@ def auto_fields(self): if isinstance(field_obj, Auto)] -class QuerySet: - """A chainable class to gather a bunch of criteria and preferences (page size, order etc.) - before execution. +class EntityStateFieldsCacheDescriptor: + def __get__(self, instance, cls=None): + if instance is None: + return self + res = instance.fields_cache = {} + return res - Internally, a QuerySet can be constructed, filtered, sliced, and generally passed around - without actually fetching data. No data fetch actually occurs until you do something - to evaluate the queryset. - Once evaluated, a `QuerySet` typically caches its results. If the data in the database - might have changed, you can get updated results for the same query by calling `all()` on a - previously evaluated `QuerySet`. +class EntityState: + """Store entity instance state.""" - Attributes: - page: The current page number of the records to be pulled - per_page: The size of each page of the records to be pulled - order_by: The list of parameters to be used for ordering the results. - Use a `-` before the parameter name to sort in descending order - and if not ascending order. - excludes_: Objects with these properties will be excluded from the results - filters: Filter criteria + def __init__(self): + self._new = True + self._changed = False + self._destroyed = False - :return Returns a `Pagination` object that holds the query results - """ + @property + def is_new(self): + return self._new - def __init__(self, entity_cls_name: str, criteria=None, page: int = 1, per_page: int = 10, - order_by: set = None): - """Initialize either with empty preferences (when invoked on an Entity) - or carry forward filters and preferences when chained - """ + @property + def is_persisted(self): + return not self._new - self._entity_cls_name = entity_cls_name - self._criteria = criteria or Q() - self._result_cache = None - self._page = page or 1 - self._per_page = per_page or 10 + @property + def is_changed(self): + return self._changed - # `order_by` could be empty, or a string or a set. - # Intialize empty set if `order_by` is None - # Convert string to set if `order_by` is a String - # Safe-cast set to a set if `order_by` is already a set - if order_by: - self._order_by = set([order_by]) if isinstance(order_by, str) else set(order_by) - else: - self._order_by = set() + @property + def is_destroyed(self): + return self._destroyed - def _clone(self): - """ - Return a copy of the current QuerySet. 
- """ - clone = self.__class__(self._entity_cls_name, criteria=self._criteria, - page=self._page, per_page=self._per_page, - order_by=self._order_by) - return clone + def mark_saved(self): + self._new = False + self._changed = False - def _add_q(self, q_object): - """Add a Q-object to the current filter.""" - self._criteria = self._criteria._combine(q_object, q_object.connector) + mark_retrieved = mark_saved # Alias as placeholder so that future change wont affect interface - def filter(self, *args, **kwargs): - """ - Return a new QuerySet instance with the args ANDed to the existing - set. - """ - return self._filter_or_exclude(False, *args, **kwargs) + def mark_changed(self): + if not (self._new or self._destroyed): + self._changed = True - def exclude(self, *args, **kwargs): - """ - Return a new QuerySet instance with NOT (args) ANDed to the existing - set. - """ - return self._filter_or_exclude(True, *args, **kwargs) + def mark_destroyed(self): + self._destroyed = True + self._changed = False - def _filter_or_exclude(self, negate, *args, **kwargs): - clone = self._clone() - if negate: - clone._add_q(~Q(*args, **kwargs)) - else: - clone._add_q(Q(*args, **kwargs)) - return clone + fields_cache = EntityStateFieldsCacheDescriptor() - def paginate(self, **page_args): - """Update page preferences for query""" - clone = self._clone() - if 'page' in page_args and isinstance(page_args['page'], int): - clone._page = page_args['page'] - if 'per_page' in page_args and isinstance(page_args['per_page'], int): - clone._per_page = page_args['per_page'] - return clone +class Entity(metaclass=EntityBase): + """The Base class for Protean-Compliant Domain Entities. - def order_by(self, order_by: Union[set, str]): - """Update page setting for filter set""" - clone = self._clone() - if isinstance(order_by, str): - order_by = {order_by} + Provides helper methods to custom define entity attributes, and query attribute names + during runtime. - clone._order_by = clone._order_by.union(order_by) + Basic Usage:: - return clone + class Dog(Entity): + id = field.Integer(identifier=True) + name = field.String(required=True, max_length=50) + age = field.Integer(default=5) + owner = field.String(required=True, max_length=15) - def _retrieve_model(self): - """Retrieve model details associated with this Entity""" - # Fetch Model class and connected repository from Repository Factory - model_cls = repo_factory.get_model(self._entity_cls_name) - repository = getattr(repo_factory, self._entity_cls_name) + During persistence, the model associated with this entity is retrieved dynamically from + the repository factory. Model is usually initialized with a live DB connection. + """ - return (model_cls, repository) + class Meta: + """Options object for an Entity. - def all(self): - """Primary method to fetch data based on filters + Acts as a placeholder for generated entity fields like: - Also trigged when the QuerySet is evaluated by calling one of the following methods: - * len() - * bool() - * list() - * Iteration - * Slicing + :declared_fields: dict + Any instances of `Field` included as attributes on either the class + or on any of its superclasses will be include in this dictionary. + :id_field: protean.core.Field + An instance of the field that will serve as the unique identifier for the entity """ - logger.debug(f'Query `{self.__class__.__name__}` objects with filters {self}') - # Destroy any cached results - self._result_cache = None + def __init__(self, *template, **kwargs): + """ + Initialise the entity object. 
- # Fetch Model class and connected repository from Repository Factory - model_cls, repository = self._retrieve_model() + During initialization, set value on fields if vaidation passes. - # order_by clause must be list of keys - order_by = model_cls.opts_.order_by if not self._order_by else self._order_by + This initialization technique supports keyword arguments as well as dictionaries. You + can even use a template for initial data. + """ - # Call the read method of the repository - results = repository._filter_objects(self._criteria, self._page, self._per_page, order_by) + self.errors = {} - # Convert the returned results to entity and return it - entity_items = [] - for item in results.items: - entity = model_cls.to_entity(item) - entity.state_.mark_retrieved() - entity_items.append(entity) - results.items = entity_items + # Set up the storage for instance state + self.state_ = EntityState() - # Cache results - self._result_cache = results + # Load the attributes based on the template + loaded_fields = [] + for dictionary in template: + if not isinstance(dictionary, dict): + raise AssertionError( + f'Positional argument "{dictionary}" passed must be a dict.' + f'This argument serves as a template for loading common ' + f'values.' + ) + for field_name, val in dictionary.items(): + loaded_fields.append(field_name) + setattr(self, field_name, val) - return results + # Now load against the keyword arguments + for field_name, val in kwargs.items(): + loaded_fields.append(field_name) + setattr(self, field_name, val) - def update(self, *data, **kwargs): - """Updates all objects with details given if they match a set of conditions supplied. + # Now load the remaining fields with a None value, which will fail + # for required fields + for field_name, field_obj in self.meta_.declared_fields.items(): + if field_name not in loaded_fields: + if not isinstance(field_obj, (Reference, ReferenceField)): + setattr(self, field_name, None) - This method updates each object individually, to fire callback methods and ensure - validations are run. + # Raise any errors found during load + if self.errors: + raise ValidationError(self.errors) - Returns the number of objects matched (which may not be equal to the number of objects - updated if objects rows already have the new value). + def _update_data(self, *data_dict, **kwargs): """ - updated_item_count = 0 - try: - items = self.all() - - for item in items: - item.update(*data, **kwargs) - updated_item_count += 1 - except Exception as exc: - # FIXME Log Exception - raise + A private method to process and update entity values correctly. - return updated_item_count + :param data: A dictionary of values to be updated for the entity + :param kwargs: keyword arguments with key-value pairs to be updated + """ - def delete(self): - """Deletes matching objects from the Repository + # Load each of the fields given in the data dictionary + self.errors = {} - Does not throw error if no objects are matched. + for data in data_dict: + if not isinstance(data, dict): + raise AssertionError( + f'Positional argument "{data}" passed must be a dict.' + f'This argument serves as a template for loading common ' + f'values.' + ) + for field_name, val in data.items(): + setattr(self, field_name, val) - Returns the number of objects matched (which may not be equal to the number of objects - deleted if objects rows already have the new value). 
- """ - # Fetch Model class and connected repository from Repository Factory - deleted_item_count = 0 - try: - items = self.all() + # Now load against the keyword arguments + for field_name, val in kwargs.items(): + setattr(self, field_name, val) - for item in items: - item.delete() - deleted_item_count += 1 - except Exception as exc: - # FIXME Log Exception - raise + # Raise any errors found during update + if self.errors: + raise ValidationError(self.errors) - return deleted_item_count + def to_dict(self): + """ Return entity data as a dictionary """ + return {field_name: getattr(self, field_name, None) + for field_name in self.meta_.declared_fields} - def update_all(self, *args, **kwargs): - """Updates all objects with details given if they match a set of conditions supplied. + @classmethod + def _retrieve_model(cls): + """Retrieve model details associated with this Entity""" + from protean.core.repository import repo_factory # FIXME Move to a better placement - This method forwards filters and updates directly to the repository. It does not - instantiate entities and it does not trigger Entity callbacks or validations. + # Fetch Model class and connected repository from Repository Factory + model_cls = repo_factory.get_model(cls) + repository = repo_factory.get_repository(cls) - Update values can be specified either as a dict, or keyword arguments. + return (model_cls, repository) - Returns the number of objects matched (which may not be equal to the number of objects - updated if objects rows already have the new value). - """ - updated_item_count = 0 - _, repository = self._retrieve_model() - try: - updated_item_count = repository._update_all_objects(self._criteria, *args, **kwargs) - except Exception as exc: - # FIXME Log Exception - raise + def clone(self): + """Deepclone the entity, but reset state""" + clone_copy = copy.deepcopy(self) + clone_copy.state_ = EntityState() - return updated_item_count + return clone_copy - def delete_all(self, *args, **kwargs): - """Deletes objects that match a set of conditions supplied. + ###################### + # Life-cycle methods # + ###################### - This method forwards filters directly to the repository. It does not instantiate entities and - it does not trigger Entity callbacks or validations. + @classmethod + def get(cls, identifier: Any) -> 'Entity': + """Get a specific Record from the Repository - Returns the number of objects matched and deleted. + :param identifier: id of the record to be fetched from the repository. 
""" - deleted_item_count = 0 - _, repository = self._retrieve_model() - try: - deleted_item_count = repository._delete_all_objects(self._criteria) - except Exception as exc: - # FIXME Log Exception - raise - - return deleted_item_count + logger.debug(f'Lookup `{cls.__name__}` object with identifier {identifier}') + # Get the ID field for the entity + filters = { + cls.meta_.id_field.field_name: identifier + } - ############################### - # Python Magic method support # - ############################### + # Find this item in the repository or raise Error + results = cls.query.filter(**filters).paginate(page=1, per_page=1).all() + if not results: + raise ObjectNotFoundError( + f'`{cls.__name__}` object with identifier {identifier} ' + f'does not exist.') - def __iter__(self): - """Return results on iteration""" - if self._result_cache: - return iter(self._result_cache) + # Return the first result + return results.first - return iter(self.all()) + @classmethod + def find_by(cls, **kwargs) -> 'Entity': + """Find a specific entity record that matches one or more criteria. - def __len__(self): - """Return length of results""" - if self._result_cache: - return self._result_cache.total + :param kwargs: named arguments consisting of attr_name and attr_value pairs to search on + """ + logger.debug(f'Lookup `{cls.__name__}` object with values ' + f'{kwargs}') - return self.all().total + # Find this item in the repository or raise Error + results = cls.query.filter(**kwargs).paginate(page=1, per_page=1).all() - def __bool__(self): - """Return True if query results have items""" - if self._result_cache: - return bool(self._result_cache) + if not results: + raise ObjectNotFoundError( + f'`{cls.__name__}` object with values {[item for item in kwargs.items()]} ' + f'does not exist.') - return bool(self.all()) + # Return the first result + return results.first - def __repr__(self): - """Support friendly print of query criteria""" - return ("<%s: entity: %s, criteria: %s, page: %s, per_page: %s, order_by: %s>" % - (self.__class__.__name__, self._entity_cls_name, - self._criteria.deconstruct(), - self._page, self._per_page, self._order_by)) + @classmethod + def exists(cls, excludes_, **filters): + """ Return `True` if objects matching the provided filters and excludes + exist if not return false. - def __getitem__(self, k): - """Support slicing of results""" - if self._result_cache: - return self._result_cache.items[k] + Calls the `filter` method by default, but can be overridden for better and + quicker implementations that may be supported by a database. - return self.all().items[k] + :param excludes_: entities without this combination of field name and + values will be returned + """ + results = cls.query.filter(**filters).exclude(**excludes_) + return bool(results) - ######################### - # Pagination properties # - ######################### + @classmethod + def create(cls, *args, **kwargs) -> 'Entity': + """Create a new record in the repository. 
- @property - def total(self): - """Return the total number of records""" - if self._result_cache: - return self._result_cache.total + Also performs unique validations before creating the entity - return self.all().total + :param args: positional arguments for the entity + :param kwargs: keyword arguments for the entity + """ + logger.debug( + f'Creating new `{cls.__name__}` object using data {kwargs}') - @property - def items(self): - """Return result values""" - if self._result_cache: - return self._result_cache.items + model_cls, repository = cls._retrieve_model() - return self.all().items + try: + # Build the entity from the input arguments + # Raises validation errors, if any, at this point + entity = cls(*args, **kwargs) - @property - def first(self): - """Return the first result""" - if self._result_cache: - return self._result_cache.first + # Do unique checks, create this object and return it + entity._validate_unique() - return self.all().first + # Build the model object and create it + model_obj = repository._create_object(model_cls.from_entity(entity)) - @property - def has_next(self): - """Return True if there are more values present""" - if self._result_cache: - return self._result_cache.has_next + # Update the auto fields of the entity + for field_name, field_obj in entity.meta_.declared_fields.items(): + if isinstance(field_obj, Auto): + if isinstance(model_obj, dict): + field_val = model_obj[field_name] + else: + field_val = getattr(model_obj, field_name) + setattr(entity, field_name, field_val) - return self.all().has_next + # Set Entity status to saved + entity.state_.mark_saved() - @property - def has_prev(self): - """Return True if there are previous values present""" - if self._result_cache: - return self._result_cache.has_prev + return entity + except ValidationError as exc: + # FIXME Log Exception + raise - return self.all().has_prev + def save(self): + """Save a new Entity into repository. + Performs unique validations before creating the entity. + """ + logger.debug( + f'Saving `{self.__class__.__name__}` object') -class EntityStateFieldsCacheDescriptor: - def __get__(self, instance, cls=None): - if instance is None: - return self - res = instance.fields_cache = {} - return res + # Fetch Model class and connected repository from Repository Factory + model_cls, repository = self.__class__._retrieve_model() + try: + # Do unique checks, update the record and return the Entity + self._validate_unique(create=False) -class EntityState: - """Store entity instance state.""" + # Build the model object and create it + model_obj = repository._create_object(model_cls.from_entity(self)) - def __init__(self): - self._new = True - self._changed = False - self._destroyed = False + # Update the auto fields of the entity + for field_name, field_obj in self.meta_.declared_fields.items(): + if isinstance(field_obj, Auto): + if isinstance(model_obj, dict): + field_val = model_obj[field_name] + else: + field_val = getattr(model_obj, field_name) + setattr(self, field_name, field_val) - @property - def is_new(self): - return self._new + # Set Entity status to saved + self.state_.mark_saved() - @property - def is_persisted(self): - return not self._new + return self + except Exception as exc: + # FIXME Log Exception + raise - @property - def is_changed(self): - return self._changed + def update(self, *data, **kwargs) -> 'Entity': + """Update a Record in the repository. 
- @property - def is_destroyed(self): - return self._destroyed + Also performs unique validations before creating the entity. - def mark_saved(self): - self._new = False - self._changed = False + Supports both dictionary and keyword argument updates to the entity:: - mark_retrieved = mark_saved # Alias as placeholder so that future change wont affect interface + dog.update({'age': 10}) - def mark_changed(self): - if not (self._new or self._destroyed): - self._changed = True + dog.update(age=10) - def mark_destroyed(self): - self._destroyed = True - self._changed = False + :param data: Dictionary of values to be updated for the entity + :param kwargs: keyword arguments with key-value pairs to be updated + """ + logger.debug(f'Updating existing `{self.__class__.__name__}` object with id {self.id}') - fields_cache = EntityStateFieldsCacheDescriptor() + # Fetch Model class and connected repository from Repository Factory + model_cls, repository = self.__class__._retrieve_model() + try: + # Update entity's data attributes + self._update_data(*data, **kwargs) -class Entity(metaclass=EntityBase): - """The Base class for Protean-Compliant Domain Entities. + # Do unique checks, update the record and return the Entity + self._validate_unique(create=False) + repository._update_object(model_cls.from_entity(self)) - Provides helper methods to custom define entity attributes, and query attribute names - during runtime. + # Set Entity status to saved + self.state_.mark_saved() - Basic Usage:: + return self + except Exception as exc: + # FIXME Log Exception + raise - class Dog(Entity): - id = field.Integer(identifier=True) - name = field.String(required=True, max_length=50) - age = field.Integer(default=5) - owner = field.String(required=True, max_length=15) + def _validate_unique(self, create=True): + """ Validate the unique constraints for the entity """ + # Fetch Model class and connected-repository from Repository Factory + model_cls, _ = self.__class__._retrieve_model() - During persistence, the model associated with this entity is retrieved dynamically from - the repository factory. Model is usually initialized with a live DB connection. - """ + # Build the filters from the unique constraints + filters, excludes = {}, {} - class Meta: - """Options object for an Entity. + for field_name, field_obj in self.meta_.unique_fields: + lookup_value = getattr(self, field_name, None) + # Ignore empty lookup values + if lookup_value in Field.empty_values: + continue + # Ignore identifiers on updates + if not create and field_obj.identifier: + excludes[field_name] = lookup_value + continue + filters[field_name] = lookup_value - Acts as a placeholder for generated entity fields like: + # Lookup the objects by the filters and raise error on results + for filter_key, lookup_value in filters.items(): + if self.exists(excludes, **{filter_key: lookup_value}): + field_obj = self.meta_.declared_fields[filter_key] + field_obj.fail('unique', + model_name=model_cls.opts_.model_name, + field_name=filter_key) - :declared_fields: dict - Any instances of `Field` included as attributes on either the class - or on any of its superclasses will be include in this dictionary. - :id_field: protean.core.Field - An instance of the field that will serve as the unique identifier for the entity - """ + def delete(self): + """Delete a Record from the Repository - def __init__(self, *template, **kwargs): + will perform callbacks and run validations before deletion. 
+ + Throws ObjectNotFoundError if the object was not found in the repository. """ - Initialise the entity object. + # Fetch Model class and connected repository from Repository Factory + model_cls, repository = self.__class__._retrieve_model() - During initialization, set value on fields if vaidation passes. + try: + if not self.state_.is_destroyed: + # Update entity's data attributes + repository._delete_object(model_cls.from_entity(self)) - This initialization technique supports keyword arguments as well as dictionaries. You - can even use a template for initial data. - """ + # Set Entity status to saved + self.state_.mark_destroyed() - self.errors = {} + return self + except Exception as exc: + # FIXME Log Exception + raise - # Set up the storage for instance state - self.state_ = EntityState() - # Load the attributes based on the template - loaded_fields = [] - for dictionary in template: - if not isinstance(dictionary, dict): - raise AssertionError( - f'Positional argument "{dictionary}" passed must be a dict.' - f'This argument serves as a template for loading common ' - f'values.' - ) - for field_name, val in dictionary.items(): - loaded_fields.append(field_name) - setattr(self, field_name, val) +class QuerySet: + """A chainable class to gather a bunch of criteria and preferences (page size, order etc.) + before execution. - # Now load against the keyword arguments - for field_name, val in kwargs.items(): - loaded_fields.append(field_name) - setattr(self, field_name, val) + Internally, a QuerySet can be constructed, filtered, sliced, and generally passed around + without actually fetching data. No data fetch actually occurs until you do something + to evaluate the queryset. - # Now load the remaining fields with a None value, which will fail - # for required fields - for field_name, field_obj in self.meta_.declared_fields.items(): - if field_name not in loaded_fields: - if not isinstance(field_obj, (Reference, ReferenceField)): - setattr(self, field_name, None) + Once evaluated, a `QuerySet` typically caches its results. If the data in the database + might have changed, you can get updated results for the same query by calling `all()` on a + previously evaluated `QuerySet`. - # Raise any errors found during load - if self.errors: - raise ValidationError(self.errors) + Attributes: + page: The current page number of the records to be pulled + per_page: The size of each page of the records to be pulled + order_by: The list of parameters to be used for ordering the results. + Use a `-` before the parameter name to sort in descending order + and if not ascending order. + excludes_: Objects with these properties will be excluded from the results + filters: Filter criteria - def _update_data(self, *data_dict, **kwargs): - """ - A private method to process and update entity values correctly. + :return Returns a `Pagination` object that holds the query results + """ - :param data: A dictionary of values to be updated for the entity - :param kwargs: keyword arguments with key-value pairs to be updated + def __init__(self, entity_cls: Entity, criteria=None, page: int = 1, per_page: int = 10, + order_by: set = None): + """Initialize either with empty preferences (when invoked on an Entity) + or carry forward filters and preferences when chained """ - # Load each of the fields given in the data dictionary - self.errors = {} - - for data in data_dict: - if not isinstance(data, dict): - raise AssertionError( - f'Positional argument "{data}" passed must be a dict.' 
- f'This argument serves as a template for loading common ' - f'values.' - ) - for field_name, val in data.items(): - setattr(self, field_name, val) + self._entity_cls = entity_cls + self._criteria = criteria or Q() + self._result_cache = None + self._page = page or 1 + self._per_page = per_page or 10 - # Now load against the keyword arguments - for field_name, val in kwargs.items(): - setattr(self, field_name, val) + # `order_by` could be empty, or a string or a set. + # Intialize empty set if `order_by` is None + # Convert string to set if `order_by` is a String + # Safe-cast set to a set if `order_by` is already a set + if order_by: + self._order_by = set([order_by]) if isinstance(order_by, str) else set(order_by) + else: + self._order_by = set() - # Raise any errors found during update - if self.errors: - raise ValidationError(self.errors) + def _clone(self): + """ + Return a copy of the current QuerySet. + """ + clone = self.__class__(self._entity_cls, criteria=self._criteria, + page=self._page, per_page=self._per_page, + order_by=self._order_by) + return clone - def to_dict(self): - """ Return entity data as a dictionary """ - return {field_name: getattr(self, field_name, None) - for field_name in self.meta_.declared_fields} + def _add_q(self, q_object): + """Add a Q-object to the current filter.""" + self._criteria = self._criteria._combine(q_object, q_object.connector) - @classmethod - def _retrieve_model(cls): - """Retrieve model details associated with this Entity""" - from protean.core.repository import repo_factory # FIXME Move to a better placement + def filter(self, *args, **kwargs): + """ + Return a new QuerySet instance with the args ANDed to the existing + set. + """ + return self._filter_or_exclude(False, *args, **kwargs) - # Fetch Model class and connected repository from Repository Factory - model_cls = repo_factory.get_model(cls.__name__) - repository = getattr(repo_factory, cls.__name__) + def exclude(self, *args, **kwargs): + """ + Return a new QuerySet instance with NOT (args) ANDed to the existing + set. + """ + return self._filter_or_exclude(True, *args, **kwargs) - return (model_cls, repository) + def _filter_or_exclude(self, negate, *args, **kwargs): + clone = self._clone() + if negate: + clone._add_q(~Q(*args, **kwargs)) + else: + clone._add_q(Q(*args, **kwargs)) + return clone - def clone(self): - """Deepclone the entity, but reset state""" - clone_copy = copy.deepcopy(self) - clone_copy.state_ = EntityState() + def paginate(self, **page_args): + """Update page preferences for query""" + clone = self._clone() + if 'page' in page_args and isinstance(page_args['page'], int): + clone._page = page_args['page'] + if 'per_page' in page_args and isinstance(page_args['per_page'], int): + clone._per_page = page_args['per_page'] - return clone_copy + return clone - ###################### - # Life-cycle methods # - ###################### + def order_by(self, order_by: Union[set, str]): + """Update page setting for filter set""" + clone = self._clone() + if isinstance(order_by, str): + order_by = {order_by} - @classmethod - def get(cls, identifier: Any) -> 'Entity': - """Get a specific Record from the Repository + clone._order_by = clone._order_by.union(order_by) - :param identifier: id of the record to be fetched from the repository. 
- """ - logger.debug(f'Lookup `{cls.__name__}` object with identifier {identifier}') - # Get the ID field for the entity - filters = { - cls.meta_.id_field.field_name: identifier - } + return clone - # Find this item in the repository or raise Error - results = cls.query.filter(**filters).paginate(page=1, per_page=1).all() - if not results: - raise ObjectNotFoundError( - f'`{cls.__name__}` object with identifier {identifier} ' - f'does not exist.') + def _retrieve_model(self): + """Retrieve model details associated with this Entity""" + # Fetch Model class and connected repository from Repository Factory + model_cls = repo_factory.get_model(self._entity_cls) + repository = repo_factory.get_repository(self._entity_cls) - # Return the first result - return results.first + return (model_cls, repository) - @classmethod - def find_by(cls, **kwargs) -> 'Entity': - """Find a specific entity record that matches one or more criteria. + def all(self): + """Primary method to fetch data based on filters - :param kwargs: named arguments consisting of attr_name and attr_value pairs to search on + Also trigged when the QuerySet is evaluated by calling one of the following methods: + * len() + * bool() + * list() + * Iteration + * Slicing """ - logger.debug(f'Lookup `{cls.__name__}` object with values ' - f'{kwargs}') - - # Find this item in the repository or raise Error - results = cls.query.filter(**kwargs).paginate(page=1, per_page=1).all() + logger.debug(f'Query `{self.__class__.__name__}` objects with filters {self}') - if not results: - raise ObjectNotFoundError( - f'`{cls.__name__}` object with values {[item for item in kwargs.items()]} ' - f'does not exist.') + # Destroy any cached results + self._result_cache = None - # Return the first result - return results.first + # Fetch Model class and connected repository from Repository Factory + model_cls, repository = self._retrieve_model() - @classmethod - def exists(cls, excludes_, **filters): - """ Return `True` if objects matching the provided filters and excludes - exist if not return false. + # order_by clause must be list of keys + order_by = model_cls.opts_.order_by if not self._order_by else self._order_by - Calls the `filter` method by default, but can be overridden for better and - quicker implementations that may be supported by a database. + # Call the read method of the repository + results = repository._filter_objects(self._criteria, self._page, self._per_page, order_by) - :param excludes_: entities without this combination of field name and - values will be returned - """ - results = cls.query.filter(**filters).exclude(**excludes_) - return bool(results) + # Convert the returned results to entity and return it + entity_items = [] + for item in results.items: + entity = model_cls.to_entity(item) + entity.state_.mark_retrieved() + entity_items.append(entity) + results.items = entity_items - @classmethod - def create(cls, *args, **kwargs) -> 'Entity': - """Create a new record in the repository. + # Cache results + self._result_cache = results - Also performs unique validations before creating the entity + return results - :param args: positional arguments for the entity - :param kwargs: keyword arguments for the entity - """ - logger.debug( - f'Creating new `{cls.__name__}` object using data {kwargs}') + def update(self, *data, **kwargs): + """Updates all objects with details given if they match a set of conditions supplied. 
- model_cls, repository = cls._retrieve_model() + This method updates each object individually, to fire callback methods and ensure + validations are run. + Returns the number of objects matched (which may not be equal to the number of objects + updated if objects rows already have the new value). + """ + updated_item_count = 0 try: - # Build the entity from the input arguments - # Raises validation errors, if any, at this point - entity = cls(*args, **kwargs) + items = self.all() - # Do unique checks, create this object and return it - entity._validate_unique() + for item in items: + item.update(*data, **kwargs) + updated_item_count += 1 + except Exception as exc: + # FIXME Log Exception + raise - # Build the model object and create it - model_obj = repository._create_object(model_cls.from_entity(entity)) + return updated_item_count - # Update the auto fields of the entity - for field_name, field_obj in entity.meta_.declared_fields.items(): - if isinstance(field_obj, Auto): - if isinstance(model_obj, dict): - field_val = model_obj[field_name] - else: - field_val = getattr(model_obj, field_name) - setattr(entity, field_name, field_val) + def delete(self): + """Deletes matching objects from the Repository - # Set Entity status to saved - entity.state_.mark_saved() + Does not throw error if no objects are matched. - return entity - except ValidationError as exc: + Returns the number of objects matched (which may not be equal to the number of objects + deleted if objects rows already have the new value). + """ + # Fetch Model class and connected repository from Repository Factory + deleted_item_count = 0 + try: + items = self.all() + + for item in items: + item.delete() + deleted_item_count += 1 + except Exception as exc: # FIXME Log Exception raise - def save(self): - """Save a new Entity into repository. + return deleted_item_count - Performs unique validations before creating the entity. - """ - logger.debug( - f'Saving `{self.__class__.__name__}` object') + def update_all(self, *args, **kwargs): + """Updates all objects with details given if they match a set of conditions supplied. - # Fetch Model class and connected repository from Repository Factory - model_cls, repository = self.__class__._retrieve_model() + This method forwards filters and updates directly to the repository. It does not + instantiate entities and it does not trigger Entity callbacks or validations. + Update values can be specified either as a dict, or keyword arguments. + + Returns the number of objects matched (which may not be equal to the number of objects + updated if objects rows already have the new value). + """ + updated_item_count = 0 + _, repository = self._retrieve_model() try: - # Do unique checks, update the record and return the Entity - self._validate_unique(create=False) + updated_item_count = repository._update_all_objects(self._criteria, *args, **kwargs) + except Exception as exc: + # FIXME Log Exception + raise - # Build the model object and create it - model_obj = repository._create_object(model_cls.from_entity(self)) + return updated_item_count - # Update the auto fields of the entity - for field_name, field_obj in self.meta_.declared_fields.items(): - if isinstance(field_obj, Auto): - if isinstance(model_obj, dict): - field_val = model_obj[field_name] - else: - field_val = getattr(model_obj, field_name) - setattr(self, field_name, field_val) + def delete_all(self, *args, **kwargs): + """Deletes objects that match a set of conditions supplied. 
- # Set Entity status to saved - self.state_.mark_saved() + This method forwards filters directly to the repository. It does not instantiate entities and + it does not trigger Entity callbacks or validations. - return self + Returns the number of objects matched and deleted. + """ + deleted_item_count = 0 + _, repository = self._retrieve_model() + try: + deleted_item_count = repository._delete_all_objects(self._criteria) except Exception as exc: # FIXME Log Exception raise - def update(self, *data, **kwargs) -> 'Entity': - """Update a Record in the repository. + return deleted_item_count - Also performs unique validations before creating the entity. + ############################### + # Python Magic method support # + ############################### - Supports both dictionary and keyword argument updates to the entity:: + def __iter__(self): + """Return results on iteration""" + if self._result_cache: + return iter(self._result_cache) - dog.update({'age': 10}) + return iter(self.all()) - dog.update(age=10) + def __len__(self): + """Return length of results""" + if self._result_cache: + return self._result_cache.total - :param data: Dictionary of values to be updated for the entity - :param kwargs: keyword arguments with key-value pairs to be updated - """ - logger.debug(f'Updating existing `{self.__class__.__name__}` object with id {self.id}') + return self.all().total - # Fetch Model class and connected repository from Repository Factory - model_cls, repository = self.__class__._retrieve_model() + def __bool__(self): + """Return True if query results have items""" + if self._result_cache: + return bool(self._result_cache) - try: - # Update entity's data attributes - self._update_data(*data, **kwargs) + return bool(self.all()) - # Do unique checks, update the record and return the Entity - self._validate_unique(create=False) - repository._update_object(model_cls.from_entity(self)) + def __repr__(self): + """Support friendly print of query criteria""" + return ("<%s: entity: %s, criteria: %s, page: %s, per_page: %s, order_by: %s>" % + (self.__class__.__name__, self._entity_cls, + self._criteria.deconstruct(), + self._page, self._per_page, self._order_by)) - # Set Entity status to saved - self.state_.mark_saved() + def __getitem__(self, k): + """Support slicing of results""" + if self._result_cache: + return self._result_cache.items[k] - return self - except Exception as exc: - # FIXME Log Exception - raise + return self.all().items[k] - def _validate_unique(self, create=True): - """ Validate the unique constraints for the entity """ - # Fetch Model class and connected-repository from Repository Factory - model_cls, _ = self.__class__._retrieve_model() + ######################### + # Pagination properties # + ######################### - # Build the filters from the unique constraints - filters, excludes = {}, {} + @property + def total(self): + """Return the total number of records""" + if self._result_cache: + return self._result_cache.total - for field_name, field_obj in self.meta_.unique_fields: - lookup_value = getattr(self, field_name, None) - # Ignore empty lookup values - if lookup_value in Field.empty_values: - continue - # Ignore identifiers on updates - if not create and field_obj.identifier: - excludes[field_name] = lookup_value - continue - filters[field_name] = lookup_value + return self.all().total - # Lookup the objects by the filters and raise error on results - for filter_key, lookup_value in filters.items(): - if self.exists(excludes, **{filter_key: lookup_value}): - 
field_obj = self.meta_.declared_fields[filter_key] - field_obj.fail('unique', - model_name=model_cls.opts_.model_name, - field_name=filter_key) + @property + def items(self): + """Return result values""" + if self._result_cache: + return self._result_cache.items - def delete(self): - """Delete a Record from the Repository + return self.all().items - will perform callbacks and run validations before deletion. + @property + def first(self): + """Return the first result""" + if self._result_cache: + return self._result_cache.first - Throws ObjectNotFoundError if the object was not found in the repository. - """ - # Fetch Model class and connected repository from Repository Factory - model_cls, repository = self.__class__._retrieve_model() + return self.all().first - try: - if not self.state_.is_destroyed: - # Update entity's data attributes - repository._delete_object(model_cls.from_entity(self)) + @property + def has_next(self): + """Return True if there are more values present""" + if self._result_cache: + return self._result_cache.has_next - # Set Entity status to saved - self.state_.mark_destroyed() + return self.all().has_next - return self - except Exception as exc: - # FIXME Log Exception - raise + @property + def has_prev(self): + """Return True if there are previous values present""" + if self._result_cache: + return self._result_cache.has_prev + + return self.all().has_prev diff --git a/src/protean/core/repository/factory.py b/src/protean/core/repository/factory.py index 2d3ab195..d3b668df 100644 --- a/src/protean/core/repository/factory.py +++ b/src/protean/core/repository/factory.py @@ -1,9 +1,11 @@ """ Factory class for managing repository connections""" import logging +from collections import namedtuple from threading import local from protean.core.exceptions import ConfigurationError from protean.core.provider import providers +from protean.utils.generic import fully_qualified_name logger = logging.getLogger('protean.repository') @@ -16,12 +18,16 @@ class RepositoryFactory: be let go. """ + # EntityRecord Inner Class, implemented as a namedtuple for ease of use. + # This class will store attributes related to Entity and Models, and will be objects + # in the registry dictionary. 
+ EntityRecord = namedtuple( + 'EntityRecord', + 'name, qualname, entity_cls, provider_name, model_cls, fully_baked_model') + def __init__(self): """"Initialize repository factory""" - self._provider_registry = {} - self._entity_registry = {} - self._model_registry = {} - self._fully_baked_models = {} + self._registry = {} self._connections = local() def register(self, model_cls, provider_name=None): @@ -34,21 +40,66 @@ def register(self, model_cls, provider_name=None): # Register the model if it does not exist model_name = model_cls.__name__ - entity_name = model_cls.opts_.entity_cls.__name__ + entity_name = fully_qualified_name(model_cls.opts_.entity_cls) + provider_name = provider_name or model_cls.opts_.bind or 'default' - if self._provider_registry.get(entity_name): - # This probably is an accidental re-registration of the entity - # and we should warn the user of a possible repository confusion - raise ConfigurationError( - f'Entity {entity_name} has already been registered') - else: - self._provider_registry[entity_name] = provider_name or model_cls.opts_.bind or 'default' - self._model_registry[entity_name] = model_cls - self._entity_registry[entity_name] = model_cls.opts_.entity_cls + try: + entity = self._get_entity_by_class(model_cls.opts_.entity_cls) + + if entity: + # This probably is an accidental re-registration of the entity + # and we should warn the user of a possible repository confusion + raise ConfigurationError( + f'Entity {entity_name} has already been registered') + except AssertionError: + # Entity has not been registered yet. Let's go ahead and add it to the registry. + entity_record = RepositoryFactory.EntityRecord( + name=model_cls.opts_.entity_cls.__name__, + qualname=entity_name, + entity_cls=model_cls.opts_.entity_cls, + provider_name=provider_name, + model_cls=model_cls, + fully_baked_model=False + ) + self._registry[entity_name] = entity_record logger.debug( f'Registered model {model_name} for entity {entity_name} with provider' f' {provider_name}.') + def _find_entity_in_records_by_class_name(self, entity_name): + """Fetch by Entity Name in values""" + records = { + key: value for (key, value) + in self._registry.items() + if value.name == entity_name + } + # If more than one record was found, we are dealing with the case of + # an Entity name present in multiple places (packages or plugins). Throw an error + # and ask for a fully qualified Entity name to be specified + if len(records) > 1: + raise ConfigurationError( + f'Entity with name {entity_name} has been registered twice. 
' + f'Please use fully qualified Entity name to specify the exact Entity.') + elif len(records) == 1: + return next(iter(records.values())) + else: + raise AssertionError(f'No Entity registered with name {entity_name}') + + def _get_entity_by_class(self, entity_cls): + """Fetch Entity record with Entity class details""" + entity_qualname = fully_qualified_name(entity_cls) + if entity_qualname in self._registry: + return self._registry[entity_qualname] + else: + return self._find_entity_in_records_by_class_name(entity_cls.__name__) + + def _get_entity_by_name(self, entity_name): + """Fetch Entity record with an Entity name""" + if entity_name in self._registry: + return self._registry[entity_name] + else: + return self._find_entity_in_records_by_class_name(entity_name) + def _validate_model_cls(self, model_cls): """Validate that Model is a valid class""" # Import here to avoid cyclic dependency @@ -58,45 +109,40 @@ def _validate_model_cls(self, model_cls): raise AssertionError( f'Model {model_cls} must be subclass of `BaseModel`') - def get_model(self, entity_name): + def get_model(self, entity_cls): """Retrieve Model class connected to Entity""" - if entity_name in self._fully_baked_models: - return self._fully_baked_models[entity_name] + entity_record = self._get_entity_by_class(entity_cls) - try: - # This will trigger ``AssertionError`` if entity is not registered - model_cls = self._model_registry[entity_name] - - provider = self.get_provider(entity_name) - fully_baked_model = provider.get_model(model_cls) + model_cls = None + if entity_record.fully_baked_model: + model_cls = entity_record.model_cls + else: + provider = self.get_provider(entity_record.provider_name) + baked_model_cls = provider.get_model(entity_record.model_cls) # Record for future reference - self._fully_baked_models['entity_name'] = fully_baked_model + new_entity_record = entity_record._replace(model_cls=baked_model_cls, + fully_baked_model=True) + self._registry[entity_record.qualname] = new_entity_record - return fully_baked_model - except KeyError: - raise AssertionError(f'No Model registered for {entity_name}') + model_cls = baked_model_cls + + return model_cls def get_entity(self, entity_name): """Retrieve Entity class registered by `entity_name`""" - try: - return self._entity_registry[entity_name] - except KeyError: - raise AssertionError(f'No Entity registered with name {entity_name}') + return self._get_entity_by_name(entity_name).entity_cls - def get_provider(self, entity_name): - """Retrieve the provider name registered for the entity""" - provider_name = self._provider_registry[entity_name] + def get_provider(self, provider_name): + """Retrieve the provider object with a given provider name""" return providers.get_provider(provider_name) - def __getattr__(self, entity_name): - try: - provider = self.get_provider(entity_name) + def get_repository(self, entity_cls): + """Retrieve a Repository for the Model with a live connection""" + entity_record = self._get_entity_by_class(entity_cls) + provider = self.get_provider(entity_record.provider_name) - # Fetch a repository object with live connection - return provider.get_repository(self._model_registry[entity_name]) - except KeyError: - raise AssertionError(f'No Model registered for {entity_name}') + return provider.get_repository(entity_record.model_cls) repo_factory = RepositoryFactory() diff --git a/src/protean/utils/generic.py b/src/protean/utils/generic.py index 6a5fb164..30340dab 100644 --- a/src/protean/utils/generic.py +++ b/src/protean/utils/generic.py 
@@ -8,3 +8,8 @@ def __init__(self, fget): def __get__(self, owner_self, owner_cls): return self.fget(owner_cls) + + +def fully_qualified_name(cls): + """Return Fully Qualified name along with module""" + return '.'.join([cls.__module__, cls.__qualname__]) diff --git a/tests/conftest.py b/tests/conftest.py index f3530e4b..227d0c8f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,24 +45,29 @@ def register_models(): def run_around_tests(): """Cleanup Database after each test run""" from protean.core.repository import repo_factory + from tests.support.dog import (Dog, RelatedDog, DogRelatedByEmail, HasOneDog1, + HasOneDog2, HasOneDog3, HasManyDog1, HasManyDog2, + HasManyDog3, ThreadedDog) + from tests.support.human import (Human, HasOneHuman1, HasOneHuman2, HasOneHuman3, + HasManyHuman1, HasManyHuman2, HasManyHuman3) # A test function will be run at this point yield - repo_factory.Dog.delete_all() - repo_factory.RelatedDog.delete_all() - repo_factory.DogRelatedByEmail.delete_all() - repo_factory.HasOneDog1.delete_all() - repo_factory.HasOneDog2.delete_all() - repo_factory.HasOneDog3.delete_all() - repo_factory.HasManyDog1.delete_all() - repo_factory.HasManyDog2.delete_all() - repo_factory.HasManyDog3.delete_all() - repo_factory.Human.delete_all() - repo_factory.HasOneHuman1.delete_all() - repo_factory.HasOneHuman2.delete_all() - repo_factory.HasOneHuman3.delete_all() - repo_factory.HasManyHuman1.delete_all() - repo_factory.HasManyHuman2.delete_all() - repo_factory.HasManyHuman3.delete_all() - repo_factory.ThreadedDog.delete_all() + repo_factory.get_repository(Dog).delete_all() + repo_factory.get_repository(RelatedDog).delete_all() + repo_factory.get_repository(DogRelatedByEmail).delete_all() + repo_factory.get_repository(HasOneDog1).delete_all() + repo_factory.get_repository(HasOneDog2).delete_all() + repo_factory.get_repository(HasOneDog3).delete_all() + repo_factory.get_repository(HasManyDog1).delete_all() + repo_factory.get_repository(HasManyDog2).delete_all() + repo_factory.get_repository(HasManyDog3).delete_all() + repo_factory.get_repository(Human).delete_all() + repo_factory.get_repository(HasOneHuman1).delete_all() + repo_factory.get_repository(HasOneHuman2).delete_all() + repo_factory.get_repository(HasOneHuman3).delete_all() + repo_factory.get_repository(HasManyHuman1).delete_all() + repo_factory.get_repository(HasManyHuman2).delete_all() + repo_factory.get_repository(HasManyHuman3).delete_all() + repo_factory.get_repository(ThreadedDog).delete_all() diff --git a/tests/core/test_entity.py b/tests/core/test_entity.py index 3bbb3aec..7911dc8b 100644 --- a/tests/core/test_entity.py +++ b/tests/core/test_entity.py @@ -353,7 +353,7 @@ def test_query_init(self): assert query is not None assert isinstance(query, QuerySet) - assert vars(query) == vars(QuerySet('Dog')) + assert vars(query) == vars(QuerySet(Dog)) def test_filter_chain_initialization_from_entity(self): """ Test that chaining returns a QuerySet for further chaining """ diff --git a/tests/core/test_queryset.py b/tests/core/test_queryset.py index f600fe8d..eaa5846c 100644 --- a/tests/core/test_queryset.py +++ b/tests/core/test_queryset.py @@ -31,7 +31,7 @@ def test_list(self): def test_repr(self): """Test that filter is evaluted on calling `list()`""" query = Dog.query.filter(owner='John').order_by('age') - assert repr(query) == (", " "criteria: ('protean.utils.query.Q', (), {'owner': 'John'}), " "page: 1, " "per_page: 10, order_by: {'age'}>") diff --git a/tests/core/test_repository.py b/tests/core/test_repository.py 
index 6e74b2e2..5009d45e 100644 --- a/tests/core/test_repository.py +++ b/tests/core/test_repository.py @@ -15,7 +15,7 @@ def test_init(self): """Test successful access to the Dog repository""" Dog.query.all() - current_db = dict(repo_factory.Dog.conn) + current_db = dict(repo_factory.get_repository(Dog).conn) assert current_db['data'] == {'dogs': {}} def test_create_error(self): diff --git a/tests/support/dog.py b/tests/support/dog.py index 57e4be3d..02049e82 100644 --- a/tests/support/dog.py +++ b/tests/support/dog.py @@ -49,7 +49,7 @@ class RelatedDog2(Entity): """ name = field.String(required=True, unique=True, max_length=50) age = field.Integer(default=5) - owner = field.Reference('Human') + owner = field.Reference('tests.support.human.Human') class RelatedDog2Model(DictModel): diff --git a/tests/support/human.py b/tests/support/human.py index 992928af..ee64d064 100644 --- a/tests/support/human.py +++ b/tests/support/human.py @@ -27,7 +27,7 @@ class HasOneHuman1(Entity): first_name = field.String(required=True, unique=True, max_length=50) last_name = field.String(required=True, unique=True, max_length=50) email = field.String(required=True, unique=True, max_length=50) - dog = association.HasOne('HasOneDog1') + dog = association.HasOne('tests.support.dog.HasOneDog1') class HasOneHuman1Model(DictModel): @@ -46,7 +46,7 @@ class HasOneHuman2(Entity): first_name = field.String(required=True, unique=True, max_length=50) last_name = field.String(required=True, unique=True, max_length=50) email = field.String(required=True, unique=True, max_length=50) - dog = association.HasOne('HasOneDog2', via='human_id') + dog = association.HasOne('tests.support.dog.HasOneDog2', via='human_id') class HasOneHuman2Model(DictModel): @@ -65,7 +65,7 @@ class HasOneHuman3(Entity): first_name = field.String(required=True, unique=True, max_length=50) last_name = field.String(required=True, unique=True, max_length=50) email = field.String(required=True, unique=True, max_length=50) - dog = association.HasOne('HasOneDog3', via='human_id') + dog = association.HasOne('tests.support.dog.HasOneDog3', via='human_id') class HasOneHuman3Model(DictModel): @@ -82,7 +82,7 @@ class HasManyHuman1(Entity): first_name = field.String(required=True, unique=True, max_length=50) last_name = field.String(required=True, unique=True, max_length=50) email = field.String(required=True, unique=True, max_length=50) - dogs = association.HasMany('HasManyDog1') + dogs = association.HasMany('tests.support.dog.HasManyDog1') class HasManyHuman1Model(DictModel): @@ -120,7 +120,7 @@ class HasManyHuman3(Entity): first_name = field.String(required=True, unique=True, max_length=50) last_name = field.String(required=True, unique=True, max_length=50) email = field.String(required=True, unique=True, max_length=50) - dogs = association.HasMany('HasManyDog3', via='human_id') + dogs = association.HasMany('tests.support.dog.HasManyDog3', via='human_id') class HasManyHuman3Model(DictModel):
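
A quick usage sketch of the reworked lookup flow above: `QuerySet` now carries the entity class itself, and repository/model access goes through `repo_factory.get_repository(...)` and `repo_factory.get_model(...)` instead of attribute lookups by entity name. This is an illustrative sketch, not part of the patch; the `Dog` entity, its import path, and the filter values are assumed from the test support modules touched in this diff.

    # Usage sketch only -- not part of the patch. `Dog` and its import path are
    # assumed to mirror tests/support/dog.py; only APIs visible in this diff are used.
    from protean.core.repository import repo_factory
    from tests.support.dog import Dog  # assumed test entity, registered as in conftest.py

    # `Dog.query` now builds a QuerySet around the entity class (not its name),
    # so the chained, lazily-evaluated criteria can resolve the model and
    # repository through the factory only when the query is executed.
    results = Dog.query.filter(owner='John').order_by('age').paginate(page=1, per_page=10).all()

    # Attribute-style access (repo_factory.Dog) is gone; the factory is asked
    # with the entity class instead.
    repository = repo_factory.get_repository(Dog)  # repository with a live connection
    model_cls = repo_factory.get_model(Dog)        # fully baked model class

    # Test cleanup follows the same pattern:
    repo_factory.get_repository(Dog).delete_all()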
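
The factory registry is now keyed by fully qualified entity names, which is also why the `Reference`/`HasOne`/`HasMany` strings in tests/support switch to dotted paths. A minimal sketch of the disambiguation behaviour, assuming the test entities have been registered (as conftest.py does) and treating the duplicate-name case as hypothetical:

    # Sketch only -- not part of the patch. `Human` and its import path are assumed
    # from tests/support/human.py; the duplicate-name case is hypothetical.
    from protean.core.repository import repo_factory
    from protean.utils.generic import fully_qualified_name
    from tests.support.human import Human  # assumed test entity, registered as in conftest.py

    # Registry keys are '<module>.<qualname>' strings:
    assert fully_qualified_name(Human) == 'tests.support.human.Human'

    # A bare class name still resolves while it is unambiguous:
    assert repo_factory.get_entity('Human') is Human

    # If a second entity named `Human` were registered from another package (a
    # hypothetical case), the bare-name lookup would raise ConfigurationError and
    # the fully qualified name would be required:
    #     repo_factory.get_entity('tests.support.human.Human')

    # Associations and references use dotted paths for the same reason, e.g.:
    #     owner = field.Reference('tests.support.human.Human')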