diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0178801e..d42d320d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -5,12 +5,8 @@ tag = True [bumpversion:file:pyproject.toml] -[bumpversion:file:docs-sphinx/conf.py] - [bumpversion:file:src/protean/__init__.py] [bumpversion:file:src/protean/template/domain_template/pyproject.toml.jinja] -[bumpversion:file:docs-sphinx/user/installation.rst] - [bumpversion:file:docs/guides/getting-started/installation.md] \ No newline at end of file diff --git a/.gitignore b/.gitignore index b4d3d78d..153aab8a 100644 --- a/.gitignore +++ b/.gitignore @@ -65,9 +65,6 @@ instance/ # Scrapy stuff: .scrapy -# Sphinx documentation -docs-sphinx/_build/ - # PyBuilder target/ @@ -135,9 +132,6 @@ output/*/index.html .testmondata *.rdb -# Sphinx -docs-sphinx/.doctrees - # TODOs TODO *.todo diff --git a/.readthedocs.yaml b/.readthedocs.yaml deleted file mode 100644 index 140db4a8..00000000 --- a/.readthedocs.yaml +++ /dev/null @@ -1,22 +0,0 @@ -# .readthedocs.yaml -# Read the Docs configuration file -# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details - -# Required -version: 2 - -# Set the version of Python and other tools you might need -build: - os: ubuntu-22.04 - tools: - python: "3.11" - -# Build documentation in the docs-sphinx/ directory with Sphinx -sphinx: - configuration: docs-sphinx/conf.py - -# Explicitly set the version of Python and its requirements -# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html -python: - install: - - requirements: docs-sphinx/requirements.txt \ No newline at end of file diff --git a/docs-sphinx/Makefile b/docs-sphinx/Makefile deleted file mode 100644 index 43276bb9..00000000 --- a/docs-sphinx/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . -BUILDDIR = ../build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs-sphinx/adapters/database.rst b/docs-sphinx/adapters/database.rst deleted file mode 100644 index 17ffd492..00000000 --- a/docs-sphinx/adapters/database.rst +++ /dev/null @@ -1,57 +0,0 @@ -Database Adapters -================= - -Elasticsearch -------------- - -To use Elasticsearch as a database provider, use the below configuration setting: - -.. code-block:: python - - DATABASES = { - "default": { - "PROVIDER": "protean.adapters.repository.elasticsearch.ESProvider", - "DATABASE": Database.ELASTICSEARCH.value, - "DATABASE_URI": {"hosts": ["localhost"]}, - "NAMESPACE_PREFIX": os.environ.get("PROTEAN_ENV"), - "SETTINGS": {"number_of_shards": 3} - }, - } - -Additional options are available for finer control: - -.. py:data:: NAMESPACE_PREFIX - - Index names in Elasticsearch instance are prefixed with the specified string. For example, if the namespace - prefix is "prod", the index of an aggregate `Person` will be `prod-person`. - -.. py:data:: NAMESPACE_SEPARATOR - - Custom character to join NAMESPACE_PREFIX and index name. Default is hyphen (`-`). 
For example, with - `NAMESPACE_SEPARATOR` as `_`, the index of aggregate `Person` will be `prod_person`. - -.. py:data:: SETTINGS - - Index settings passed on to Elasticsearch instance. - -Note that if you supply a custom Elasticsearch Model with an `Index` inner class, the options specified in the -inner class override those at the config level. - -In the sample below, with the configuration settings specified above, the options at Aggregate level will be -overridden and the Elasticsearch Model will have the default index value `*` and number of shards as `1`. - -.. code-block:: python - - class Person(BaseAggregate): - name = String() - about = Text() - - class Meta: - schema_name = "people" - - class PeopleModel(ElasticsearchModel): - name = Text(fields={"raw": Keyword()}) - about = Text() - - class Index: - settings = {"number_of_shards": 1} diff --git a/docs-sphinx/api.rst b/docs-sphinx/api.rst deleted file mode 100644 index df8bba6d..00000000 --- a/docs-sphinx/api.rst +++ /dev/null @@ -1,33 +0,0 @@ -API -=== - -.. module:: protean - -This part of the documentation covers all the interfaces of Protean. For parts where Protean depends on external -libraries, we document the most important right here and provide links to the canonical documentation. - -Core Domain ------------ - -.. autoclass:: Domain - :members: - :inherited-members: - -Domain Elements ---------------- - -.. autoclass:: BaseAggregate - :members: - :inherited-members: - -Configuration -------------- - -.. autoclass:: protean.Config - :members: - -Exceptions ----------- - -.. automodule:: protean.exceptions - :members: diff --git a/docs-sphinx/community/changelog.rst b/docs-sphinx/community/changelog.rst deleted file mode 100644 index 09929fe4..00000000 --- a/docs-sphinx/community/changelog.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../CHANGELOG.rst diff --git a/docs-sphinx/community/code-of-conduct.rst b/docs-sphinx/community/code-of-conduct.rst deleted file mode 100644 index 2d70708d..00000000 --- a/docs-sphinx/community/code-of-conduct.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../CODE_OF_CONDUCT.rst diff --git a/docs-sphinx/community/contributing.rst b/docs-sphinx/community/contributing.rst deleted file mode 100644 index ac7b6bcf..00000000 --- a/docs-sphinx/community/contributing.rst +++ /dev/null @@ -1 +0,0 @@ -.. 
include:: ../../CONTRIBUTING.rst diff --git a/docs-sphinx/conf.py b/docs-sphinx/conf.py deleted file mode 100644 index 4d1c6db7..00000000 --- a/docs-sphinx/conf.py +++ /dev/null @@ -1,74 +0,0 @@ -# cSpell: disable - -# -*- coding: utf-8 -*- -from __future__ import unicode_literals - -import datetime -import os - -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.coverage", - "sphinx.ext.doctest", - "sphinx.ext.extlinks", - "sphinx.ext.ifconfig", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "sphinx_tabs.tabs", -] -if os.getenv("SPELLCHECK"): - extensions += "sphinxcontrib.spelling" - spelling_show_suggestions = True - spelling_lang = "en_US" - -source_suffix = ".rst" -master_doc = "index" -project = "Protean" -year = datetime.date.today().strftime("%Y") -author = "Subhash Bhushan C" -copyright = "{0}, {1}".format(year, author) -version = release = "0.12.1" - -pygments_style = "autumn" -templates_path = ["."] -extlinks = { - "issue": ("https://github.com/proteanhq/protean/issues/%s", "#"), - "pr": ("https://github.com/proteanhq/protean/pull/%s", "PR #"), -} -# on_rtd is whether we are on readthedocs.org -# on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -# if not on_rtd: # only set the theme if we're building docs locally -# html_theme = 'alabaster' - -# Change theme to Alabaster -html_theme = "alabaster" - -html_theme_options = { - "description": "The Pragmatic Framework for Ambitious Applications", - "show_powered_by": False, - "github_user": "proteanhq", - "github_repo": "protean", - "github_banner": True, - "show_related": False, - "show_relbar_bottom": True, - "font_family": "Lucida Grande", - "fixed_sidebar": True, -} - -html_static_path = ["static"] - -html_show_sourcelink = False -html_use_smartypants = True -html_last_updated_fmt = "%b %d, %Y" -html_split_index = False -html_sidebars = { - "**": ["about.html", "localtoc.html", "sourcelink.html", "searchbox.html"], -} -html_short_title = "%s-%s" % (project, version) - -napoleon_use_ivar = True -napoleon_use_rtype = False -napoleon_use_param = False diff --git a/docs-sphinx/images/consuming-events.jpg b/docs-sphinx/images/consuming-events.jpg deleted file mode 100644 index 9d01d996..00000000 Binary files a/docs-sphinx/images/consuming-events.jpg and /dev/null differ diff --git a/docs-sphinx/images/raising-events.jpg b/docs-sphinx/images/raising-events.jpg deleted file mode 100644 index 7e7a6b11..00000000 Binary files a/docs-sphinx/images/raising-events.jpg and /dev/null differ diff --git a/docs-sphinx/index.rst b/docs-sphinx/index.rst deleted file mode 100644 index 0beb72fe..00000000 --- a/docs-sphinx/index.rst +++ /dev/null @@ -1,110 +0,0 @@ -Protean -======= - -Release v\ |version| - -.. image:: https://github.com/proteanhq/protean/actions/workflows/ci.yml/badge.svg?branch=master - :target: https://github.com/proteanhq/protean/actions - :alt: Build Status -.. image:: https://codecov.io/gh/proteanhq/protean/branch/master/graph/badge.svg - :target: https://codecov.io/gh/proteanhq/protean - :alt: Coverage -.. image:: https://pyup.io/repos/github/proteanhq/protean/shield.svg - :target: https://pyup.io/repos/github/proteanhq/protean/ - :alt: Updates - -Protean is a DDD and CQRS-based framework that helps you build Event-driven applications. - -Get started with :doc:`user/installation` and then get an overview with the :doc:`user/quickstart`. - -.. warning:: Protean's documentation is *Work-In-Progress* - there are significant portions of - functionality still missing. 
Refer to the contribution guide to help grow the documentation. - -------------------- - -Overview --------- - -Protean helps you build applications that can scale and adapt to growing requirements without significant rework. - -At its core, Protean encourages a Domain-Driven Design (DDD) approach to development, with support for artifacts -necessary to express your domain succinctly and precisely. It also allows you to remain agnostic to the underlying -technology by keeping implementation details out of view. - -Protean can be thought of having three capabilities: - -- *Service-Oriented* - - Develop your application as one or more subdomains that run independently as Microservices -- *Event-Driven*: - - Use events to propagate changes across subdomains or become eventually consistent within a Bounded Context. -- *Adapter-based*: - - Use Remain technology-agnostic by exposing Port interfaces to the infrastructure, with multiple adapters - supported out of the box. - -Read :doc:`user/foreword` to understand Protean's philosophy. - -.. note:: It is assumed that you have some prior knowledge about *Domain-Driven Design* (DDD) and *Command Query - Responsibility Segregation* (CQRS) architectural patterns. - - If you do not have sufficient background in these topics, you should go through standard texts - to understand Protean's behavior better. - -.. warning:: **Protean** is currently under active development. APIs and interfaces are to be expected to change - drastically and newer releases will almost certainly be backward incompatible. - - If you are interested in using Protean for your project, you may want to wait for the announcement of first - stable production-ready version. If you want to use the framework *right now*, drop us an - `email `_. - -------------------- - -User Guide ----------- - -.. toctree:: - :maxdepth: 2 - - user/foreword - user/installation - user/quickstart - user/composing-a-domain - user/domain-definition - user/entities-and-vos - user/fields - user/persistence - user/services - user/eventing - user/event-sourcing - user/config - user/cli - -Adapters --------- - -.. toctree:: - :maxdepth: 1 - - adapters/database - -API Reference -------------- - -If you are looking for information on a specific function, class or -method, this part of the documentation is for you. - -.. toctree:: - :maxdepth: 2 - - api - -Community ---------- - -The best way to track the development of Protean is through the `the GitHub repo `_. - -.. toctree:: - :maxdepth: 1 - - community/changelog - community/code-of-conduct - community/contributing diff --git a/docs-sphinx/make.bat b/docs-sphinx/make.bat deleted file mode 100644 index 3e4fe6d7..00000000 --- a/docs-sphinx/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=docs -set BUILDDIR=build - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/docs-sphinx/requirements.txt b/docs-sphinx/requirements.txt deleted file mode 100644 index b2c121b3..00000000 --- a/docs-sphinx/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -livereload>=2.6.3 -sphinx>=7.2.6 -sphinx-tabs>=3.4.4 \ No newline at end of file diff --git a/docs-sphinx/static/custom.css b/docs-sphinx/static/custom.css deleted file mode 100644 index 4d719074..00000000 --- a/docs-sphinx/static/custom.css +++ /dev/null @@ -1,3 +0,0 @@ -img { - height: auto !important; -} diff --git a/docs-sphinx/user/cli.rst b/docs-sphinx/user/cli.rst deleted file mode 100644 index 80f65b2d..00000000 --- a/docs-sphinx/user/cli.rst +++ /dev/null @@ -1,83 +0,0 @@ -Command Line Interface -====================== - -Installing Protean installs the ``protean`` script, a `Click`_ command line -interface, in your virtualenv. Executed from the terminal, this script gives -access to built-in, extension, and application-defined commands. The ``--help`` -option will give more information about any commands and options. - -.. _Click: https://click.palletsprojects.com/ - - -Application Discovery ---------------------- - -The ``protean`` command is installed by Protean, not your application; it must be -told where to find your application in order to use it. The ``PROTEAN_DOMAIN`` -environment variable is used to specify how to load the application. - -.. tabs:: - - .. group-tab:: Bash - - .. code-block:: text - - $ export PROTEAN_DOMAIN=shipping - $ protean server - - .. group-tab:: CMD - - .. code-block:: text - - > set PROTEAN_DOMAIN=shipping - > protean server - - .. group-tab:: Powershell - - .. code-block:: text - - > $env:PROTEAN_DOMAIN = "shipping" - > protean server - -While ``PROTEAN_DOMAIN`` supports a variety of options for specifying your -application, most use cases should be simple. Here are the typical values: - -(nothing) - The name "domain" or "subdomain" is imported (as a ".py" file, or package), - automatically detecting an app (``domain`` or ``subdomain``). - -``PROTEAN_DOMAIN=shipping`` - The given name is imported, automatically detecting a domain (``domain`` - or ``subdomain``). - ----- - -``PROTEAN_DOMAIN`` has three parts: an optional path that sets the current working -directory, a Python file or dotted import path, and an optional variable -name of the instance or factory. If the name is a factory, it can optionally -be followed by arguments in parentheses. The following values demonstrate these -parts: - -``PROTEAN_DOMAIN=src/shipping`` - Sets the current working directory to ``src`` then imports ``shipping``. - -``PROTEAN_DOMAIN=shipping.domain`` - Imports the path ``shipping.domain``. - -``PROTEAN_DOMAIN=shipping:dom2`` - Uses the ``dom2`` Flask instance in ``shipping``. - - -If ``PROTEAN_DOMAIN`` is not set, the command will try to import "domain" or -"subdomain" (as a ".py" file, or package) and try to detect a domain instance. - -Within the given import, the command looks for a domain instance named -``domain`` or ``subdomain``, then any domain instance. - -Run the Development Server --------------------------- - -The :func:`server ` command will start the background development server:: - - $ protean server - * Starting server... 
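To make the discovery rules above concrete, here is a minimal sketch of a module the CLI could pick up. The module name ``shipping`` and the aggregate it registers are illustrative only, not part of the framework; import paths follow the examples used elsewhere in these docs.

.. code-block:: python

    # shipping.py -- hypothetical module referenced by PROTEAN_DOMAIN=shipping
    from protean import Domain
    from protean.core.field.basic import String

    # The CLI looks for an instance named ``domain`` (or ``subdomain``) in this module
    domain = Domain(__name__)

    @domain.aggregate
    class Shipment:
        # A placeholder aggregate so the domain has at least one registered element
        destination = String(max_length=100)

With this module on the import path, running ``protean server`` with ``PROTEAN_DOMAIN=shipping`` set would import ``shipping``, find the ``domain`` instance, and start the development server.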
diff --git a/docs-sphinx/user/composing-a-domain.rst b/docs-sphinx/user/composing-a-domain.rst deleted file mode 100644 index fc98ca23..00000000 --- a/docs-sphinx/user/composing-a-domain.rst +++ /dev/null @@ -1,119 +0,0 @@ -Composing a Domain -================== - -A domain in Protean represents a ``Bounded Context`` of the application. Because it is aware of all domain elements, -the :class:`protean.Domain` in Protean acts as a ``Composition Root``, with which all modules are composed together. -It is responsible for creating and maintaining a graph of all the domain elements in the Bounded Context. - -The Domain is the one-stop gateway to: - -- Register domain elements -- Retrieve dynamically-constructed artifacts like repositories and models -- Access injected technology components at runtime - -Define a Domain ---------------------- - -Constructing the object graph is a two-step procedure. First, you initialize a domain object at a reasonable starting -point of the application. - -.. code-block:: python - - from protean import Domain - domain = Domain(__name__) - -Registering Elements to the Domain ----------------------------------- - -Next, the ``domain`` object is referenced by the rest of the application to register elements and participate -in application configuration. - -.. code-block:: python - - @domain.aggregate - class User: - name = String() - email = String(required=True) - - -Initializing the Domain ------------------------ - -Finally, the domain is initialized by calling the ``init`` method. This method will construct the object graph and -inject dependencies into the domain elements. - -.. code-block:: python - - domain.init() - -By default, a protean domain is configured to use an in-memory repository. This is useful for testing and prototyping. - -If you do not want Protean to traverse the directory structure to discover domain elements, you can pass the -``traverse`` flag as ``False`` to the ``init`` method. - -You can optionally pass a config file to the domain before initializing it. Refer to :doc:`config` to understand the different ways to configure the domain. - -Activating a Domain -------------------- - -A domain is activated by pushing up its context to the top of the domain stack. - -.. code-block:: python - - context = domain.domain_context() - context.push() - -Subsequent calls to ``protean.globals.current_domain`` will return the currently active domain. Once the task has been -completed, it is recommended that the domain stack be reset to its original state by calling ``context.pop()``. - -This is a convenient pattern to use in conjunction with most API frameworks. The domain's context is pushed up at the -beginning of a request and popped out once the request is processed. - -When to compose ---------------- - -The composition should take place *as close to the application’s entry point as possible*. In simple console -applications, the ``Main`` method is a good entry point. But for most web applications that spin up their own runtime, -we depend on the callbacks or hooks of the framework to compose the object graph. - -Accordingly, depending on the software stack you will ultimately use, you will decide when to compose the object graph. -For example, if you are using Flask as the API framework, you would compose the ``domain`` along with -the ``app`` object. - -.. 
code-block:: python - - import logging.config - import os - - from flask import Flask - - from sample_app import domain - - def create_app(): - app = Flask(__name__, static_folder=None) - - # Configure domain - current_path = os.path.abspath(os.path.dirname(__file__)) - config_path = os.path.join(current_path, "./../config.py") - domain.config.from_pyfile(config_path) - - logging.config.dictConfig(domain.config['LOGGING_CONFIG']) - - from api.views.registration import registration_api - from api.views.user import user_api - app.register_blueprint(registration_api) - app.register_blueprint(user_api) - - @app.before_request - def set_context(): - domain.init() - - # Push up a Domain Context - # This should be done within Flask App - context = domain.domain_context() - context.push() - - return app - -Of note is the activation of the domain with the help of ``@app.before_request`` decorator above - this is -``Flask``-specific. Refer to :ref:`adapter-api` section to understand how to accomplish this for other frameworks. diff --git a/docs-sphinx/user/config.rst b/docs-sphinx/user/config.rst deleted file mode 100644 index 876802aa..00000000 --- a/docs-sphinx/user/config.rst +++ /dev/null @@ -1,307 +0,0 @@ -Configuration Handling -====================== - -Protean provides a plethora of knobs to control your application behavior in runtime. They range from application -internals, like choosing an Identity Type (UUID, Integer, or Database-supplied), to technology components, like -database to use. You specify these options as configuration settings. - -You would typically supply the configuration when the application starts up. The configuration can be provided in -multiple formats as we will see below. You can even hard-code the configuration in your application code, though -it is not recommended for obvious reasons. - -Independent of how you load your config, there is a config object available which holds the loaded configuration -values: The :attr:`~protean.Domain.config` attribute of the :class:`~protean.Domain` object. This is the place -where Protean itself puts certain configuration values and also where adapters can put their configuration values. - -Configuration Basics --------------------- - -The :attr:`~protean.Domain.config` is actually a subclass of a dictionary and -can be modified just like any dictionary:: - - domain = Domain(__name__) - domain.config['TESTING'] = True - -Certain configuration values are also forwarded to the -:attr:`~protean.Domain` object so you can read and write them from there:: - - app.testing = True - -To update multiple keys at once you can use the :meth:`dict.update` method:: - - app.config.update( - TESTING=True, - SECRET_KEY=b'6@BGQz^i6bpa3dA' - ) - -Builtin Configuration Values ----------------------------- - -The following configuration values are used internally by Protean: - -.. py:data:: ENV - - What environment the app is running in. Protean and extensions may - enable behaviors based on the environment, such as enabling debug - mode. The :attr:`~protean.domain.Domain.env` attribute maps to this config - key. This is set by the :envvar:`PROTEAN_ENV` environment variable and - may not behave as expected if set in code. - - **Do not enable development when deploying in production.** - - Default: ``'production'`` - -.. py:data:: DEBUG - - Whether debug mode is enabled. This is enabled when ENV is ``development`` - and is overridden by the ``PROTEAN_DEBUG`` environment variable. - It may not behave as expected if set in code. 
- - ***Do not enable debug mode when deploying in production.*** - - Default: ``True`` - -.. py:data:: IDENTITY_STRATEGY - - What Strategy to use generate Unique Identifiers. - - Options: - - * ``UUID``: Use ``UUID4`` generated identifiers. This is the preferred strategy. - * ``DATABASE``: Use a database sequence to gather unique identifiers. The Database Sequence is specified as part of the Entity's ``Meta`` information. - * ``FUNCTION``: Use a function to generate a unique identifier. The function name needs to be supplied to the ``IDENTITY_FUNCTION`` parameter. - - Options are defined in :ref:`identity`. - - Default: ``UUID`` - -.. py:data:: IDENTITY_TYPE - - The type of value acting as the identifier for the domain. Can be among ``INTEGER``, ``STRING``, or ``UUID``. - -.. py:data:: DATABASES - - Protean allows you to specify the database provider for your application. By virtue of using a Ports and Adapters architecture, you can switch between databases at any time, and your application should work seamlessly. - - By default, Protean is packaged with a :ref:`implementation-in-memory-database` that works perfectly well in testing environments, within a single bounded context. But it is recommended to use durable database providers in production and for large scale deployments. Protean comes with built-in support for SQLAlchemy and Elasticsearch, but you can easily extend the mechanism to support your :ref:`own provider`. - - Default: - - .. code-block:: json - - { - "default": { - "PROVIDER": "protean.impl.repository.dict_repo.DictProvider" - } - } - -.. py:data:: BROKERS - - Protean uses Message Brokers for publishing and propagating events within and across Bounded Contexts. - - By default, Protean is packaged with a :ref:`inline-broker` that is sufficient in a development environment, within a single bounded context. But it is recommended to use full-fledged message brokers in production and for large scale deployments. Protean comes with built-in support Redis, but you can easily extend the mechanism to support your :ref:`own broker`. - - Options: - - * ``INLINE``: default. Use Protean's in-built message broker for development and testing purposes. - * ``REDIS``: Use Redis PubSub infrastructure as the message broker - - Options are defined in :ref:`api-brokers`. - - Default: - - .. code-block:: json - - { - "default": { - "PROVIDER": "protean.adapters.InlineBroker" - } - } - -.. py:data:: EVENT_STRATEGY - - The event processing strategy to use. Read :ref:`event-processing-strategies` for a detailed discussion. - -Configuring from Python Files ------------------------------ - -You can supply configuration as separate files, ideally located outside the actual application package. This makes -packaging and distributing the application possible via various package handling tools (:doc:`/patterns/distribute`). - -So a common pattern is this:: - - domain = Domain(__name__) - domain.config.from_object('mydomain.default_settings') - domain.config.from_envvar('MYDOMAIN_SETTINGS') - -This first loads the configuration from the `mydomain.default_settings` module and then overrides the values -with the contents of the file the :envvar:`MYDOMAIN_SETTINGS` environment variable points to. This environment -variable can be set in the shell before starting the server: - -The configuration files themselves are actual Python files. Only values in uppercase are actually stored in the -config object later on. So make sure to use uppercase letters for your config keys. 
- -Here is an example of a configuration file:: - - # Example configuration - SECRET_KEY = b'secret-key' - -Make sure to load the configuration very early on, so that both the domain and its adapters have the ability to access the configuration when starting up. There are other methods on the config object as well to load from individual files. For a complete reference, read the :class:`~protean.Config` object's documentation. - -Configuring from Data Files ---------------------------- - -It is also possible to load configuration from a file in a format of your choice using -:meth:`~protean.Config.from_file`. For example to load from a TOML file: - -.. code-block:: python - - import toml - domain.config.from_file("config.toml", load=toml.load) - -Or from a JSON file: - -.. code-block:: python - - import json - domain.config.from_file("config.json", load=json.load) - -Configuring from Environment Variables --------------------------------------- - -In addition to pointing to configuration files using environment variables, you may find it useful (or necessary) to -control your configuration values directly from the environment. - -Environment variables can be set in the shell before starting the server. - -.. tabs:: - - .. group-tab:: Bash - - .. code-block:: text - - $ export SECRET_KEY="secret-key" - $ protean server - * Server started - - .. group-tab:: CMD - - .. code-block:: text - - > set SECRET_KEY="secret-key" - > protean server - * Server started - - .. group-tab:: Powershell - - .. code-block:: text - - > $env:SECRET_KEY = "secret-key" - > protean server - * Server started - -While this approach is straightforward to use, it is important to remember that environment variables are strings -- -they are not automatically deserialized into Python types. - -Here is an example of a configuration file that uses environment variables:: - - import os - - _mail_enabled = os.environ.get("MAIL_ENABLED", default="true") - MAIL_ENABLED = _mail_enabled.lower() in {"1", "t", "true"} - - SECRET_KEY = os.environ.get("SECRET_KEY") - - if not SECRET_KEY: - raise ValueError("No SECRET_KEY set supplied") - - -Notice that any value besides an empty string will be interpreted as a boolean ``True`` value in Python, which -requires care if an environment explicitly sets values intended to be ``False``. - -There are other methods on the config object as well to load from individual files. For a complete -reference, read the :class:`~protean.Config` class documentation. - - -Development / Production ------------------------- - -Most applications need more than one configuration. There should be at least separate configurations for the -production server and the one used during development. The easiest way to handle this is to use a default -configuration that is always loaded and part of the version control, and a separate configuration that overrides the -values as necessary as mentioned in the example above:: - - domain = Domain(__name__) - domain.config.from_object('mydomain.default_settings') - domain.config.from_envvar('MYDOMAIN_SETTINGS') - -Then you just have to add a separate :file:`config.py` file and export ``MYDOMAIN_SETTINGS=/path/to/config.py`` and -you are done. However there are alternative ways as well. For example you could use imports or subclassing. - -What is very popular in the Django world is to make the import explicit in the config file by adding ``from mydomain. -default_settings import *`` to the top of the file and then overriding the changes by hand. 
You could also inspect -an environment variable like ``MYDOMAIN_MODE`` and set that to `production`, `development` etc. and import different -hard-coded files based on that. - -An interesting pattern is also to use classes and inheritance for configuration:: - - class Config(object): - TESTING = False - - class ProductionConfig(Config): - MAIL_ENABLED = True - - class DevelopmentConfig(Config): - MAIL_ENABLED = False - - class TestingConfig(Config): - MAIL_ENABLED = False - TESTING = True - -To enable such a config you just have to call into :meth:`~domain.Config.from_object`:: - - domain.config.from_object('config.ProductionConfig') - -Note that :meth:`~protean.Config.from_object` does not instantiate the class object. If you need to instantiate the -class, such as to access a property, then you must do so before calling :meth:`~protean.Config.from_object`:: - - from config import ProductionConfig - domain.config.from_object(ProductionConfig()) - - # Alternatively, import via string: - from werkzeug.utils import import_string - cfg = import_string('config.ProductionConfig')() - domain.config.from_object(cfg) - -Instantiating the configuration object allows you to use ``@property`` in your configuration classes:: - - class Config(object): - """Base config, uses staging database server.""" - TESTING = False - POSTGRES_SERVER = 'example.com' - - @property - def DATABASE_URI(self): # Note: all caps - return f"postgresql://postgres:postgres@{self.POSTGRES_SERVER}:5432/postgres" - - class ProductionConfig(Config): - """Uses production database server.""" - POSTGRES_SERVER = 'mydomain.com' - - class DevelopmentConfig(Config): - POSTGRES_SERVER = 'localhost' - - class TestingConfig(Config): - POSTGRES_SERVER = 'test.dev' - DATABASE_URI = 'sqlite:///:memory:' - -There are many different ways and it's up to you how you want to manage your configuration files. However here are a -few good recommendations: - -- Keep a default configuration in version control. Either populate the config with this default configuration or - import it in your own configuration files before overriding values. -- Use an environment variable to switch between the configurations. - This can be done from outside the Python interpreter and makes development and deployment much easier because - you can quickly and easily switch between different configs without having to touch the code at all. If you are - working often on different projects you can even create your own script for sourcing that activates a virtualenv - and exports the development configuration for you. diff --git a/docs-sphinx/user/domain-definition.rst b/docs-sphinx/user/domain-definition.rst deleted file mode 100644 index 2b71c12d..00000000 --- a/docs-sphinx/user/domain-definition.rst +++ /dev/null @@ -1,524 +0,0 @@ -Defining Domain Concepts -======================== - -DDD is all about representing domain concepts as closely as possible in code. To accomplish this, DDD outlines a set of tactical patterns that we could use to model the domain. When you want to model domain concepts that have a unique identity and that change continuously over a long period of time, you represent them as Aggregates. - -Aggregates are fundamental, coarse-grained building blocks of a domain model. They are conceptual wholes - they enclose all behaviors and data of a distinct domain concept. Aggregates are often composed of one or more :doc:`entities-and-vos`, that work together to codify the concept. 
- -In a sense, Aggregates act as ``Root Entities`` - they manage the lifecycle of all Entities and Value Objects enclosed within them. Put another way, all elements enclosed within an Aggregate are only accessible through the Aggregate itself - it acts as a consistency boundary and protects data sanctity within the cluster. - -Aggregates ----------- - -Aggregates are defined with the :meth:`~protean.Domain.aggregate` decorator: - -.. code-block:: python - - from protean.domain import Domain - from protean.core.field.basic import Date, String - - publishing = Domain(__name__) - - @publishing.aggregate - class Post: - name = String(max_length=50) - created_on = Date() - -In the example, ``Post`` is defined to be an Aggregate with two fields, ``name`` and ``created_on``, and registered with the ``publishing`` domain. - -You can also define the Aggregate as a subclass of :class:`~protean.core.BaseAggregate` and register it manually with the domain: - -.. code-block:: python - - >>> class Post(BaseAggregate): - ... name = String(max_length=50) - ... created_on = Date() - - ... publishing.register(Post) - - -Fields ------- - -Aggregates enclose a number of fields and associate behaviors with them to represent a domain concept. The fields declared in the aggregate are available as a map in :attr:`declared_fields`. - -This example defines a ``Person`` Aggregate, which has a ``first_name`` and ``last_name``. - -.. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - -``first_name`` and ``last_name`` are fields of the aggregate. Each field is specified as a class attribute, that ultimately maps to a database column (or node if you are using a NoSQL database): - -.. code-block:: python - - >>> Person.meta_.declared_fields - {'first_name': , - 'last_name': , - 'id': } - -The full list of available fields can be found in :doc:`fields`. - -Initialization --------------- - -You can initialize the values of a post object as key-value pairs: - -.. code-block:: python - - >>> person = Person(first_name="John", last_name="Doe") - >>> person.to_dict() - {'first_name': 'John', - 'last_name': 'Doe', - 'id': '6c5e7221-e0c6-4901-9a4f-c9218096b0c2'} - -Identity --------- - -If you observe the output of ``person`` object carefully, you will see a field called `id` associated automatically with the ``Person`` aggregate. - -``Aggregates`` (and ``Entities``) should always have a unique identity associated with them. By default, unique identifier field named ``id`` is added automatically by Protean. ``id`` is an :ref:`field-type-auto` field and populated with the strategy specified for :attr:`~protean.Config.ID_STRATEGY` in config. - -The identifier field is also available among ``declared_fields``, or you can access it via the special ``id_field`` meta attribute: - -.. code-block:: python - - >>> Person.meta_.declared_fields - {'first_name': , - 'last_name': , - 'id': } - >>> Person.meta_.id_field - - -By default, Protean uses the ``UUID`` Identity strategy and aggregates generate ``UUID`` values on initialization: - -.. code-block:: python - - >>> p = Person(first_name='John', last_name='Doe') - >>> p.to_dict() - {'first_name': 'John', - 'last_name': 'Doe', - 'id': '6667ec6e-d568-4ac5-9d66-0c9c4e3a571b'} - -The identifier can be optionally overridden by setting ``identifier=True`` to a field. 
Fields marked as identifiers are both ``required`` and ``unique`` and can contain either :class:`protean.core.field.basic.Integer` or :class:`protean.core.field.basic.String` values. - -In the example below, the default identifier has been overridden with an explicit ``email`` String field: - -.. code-block:: python - - @domain.aggregate - class Person: - email = String(identifier=True) - first_name = String(max_length=30) - last_name = String(max_length=30) - -When overridden, the application is responsible for initializing the entity with a unique identifier value (unless the field is of type :class:`protean.core.field.basic.Auto`): - -.. code-block:: python - - >>> p = Person(first_name='John', last_name='Doe') - ValidationError Traceback (most recent call last) - ... - ValidationError: {'email': ['is required']} - - -Inheriting Aggregates ---------------------- - -Often, you may want to put some common information into a number of Aggregates into your domain. A Protean Aggregate can be inherited from another Aggregate class: - -.. code-block:: python - - @domain.aggregate - class TimeStamped: - created_at = DateTime(default=datetime.utcnow) - updated_at = DateTime(default=datetime.utcnow) - - @domain.aggregate - class User(TimeStamped): - name = String(max_length=30) - timezone = String(max_length=30) - -The ``User`` aggregate will have three fields of its own including an identifier, and two derived from its parent class: - -.. code-block:: python - - >>> User.meta_.declared_fields - {'name': , - 'timezone': , - 'created_at': , - 'updated_at': , - 'id': } - - >>> user = User(name='John Doe', address='101, Timbuktu St.') - >>> user.to_dict() - {'name': 'John Doe', - 'timezone': None, - 'created_at': datetime.datetime(2021, 7, 7, 16, 35, 10, 799318), - 'updated_at': datetime.datetime(2021, 7, 7, 16, 35, 10, 799327), - 'id': '557770a2-5f34-4f80-895b-c38f2679766b'} - -If you do not want the parent Aggregate to be instantiable, you can mark it as abstract. - -Declaring Abstract Base Aggregates ----------------------------------- - -By default, Protean Aggregates and Entities are concrete and instantiable: - -.. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - -``Person`` is concrete and can be instantiated: - - >>> Person.meta_.abstract - False - >>> person = Person(first_name='John', last_name='Doe') - >>> person.to_dict() - {'first_name': 'John', - 'last_name': 'Doe', - 'id': '6667ec6e-d568-4ac5-9d66-0c9c4e3a571b'} - -You can optionally declare an Aggregate as abstract with the ``abstract`` :ref:`Meta option `: - -.. code-block:: python - - @domain.aggregate - class AbstractPerson: - first_name = String(max_length=30) - last_name = String(max_length=30) - - class Meta: - abstract = True - -An Aggregate marked as ``abstract`` cannot be instantiated. It's primary purpose is to serve as a base class for other aggregates. - -.. code-block:: python - - >>> AbstractPerson.meta_.abstract - True - -Trying to instantiate an abstract Aggregate will raise a `NotSupportedError` error:: - -.. code-block:: python - - >>> person = AbstractPerson() - NotSupportedError Traceback (most recent call last) - ... - NotSupportedError: AbstractPerson class has been marked abstract and cannot be instantiated - -An Aggregate derived from an abstract parent is concrete by default: - -.. code-block:: python - - class Adult(AbstractPerson): - age = Integer(default=21) - -``Adult`` class is instantiable:: - -.. 
code-block:: python - - >>> Adult.meta_.abstract - False - >>> adult = Adult(first_name='John', last_name='Doe') - >>> adult.to_dict() - {'first_name': 'John', - 'last_name': 'Doe', - 'age': 21, - 'id': '6667ec6e-d568-4ac5-9d66-0c9c4e3a571b'} - -An Aggregate can be marked as ``abstract`` at any level of inheritance. - -An important point to note is that Aggregates marked abstract do not have an identity. - -.. code-block:: python - - @domain.aggregate - class TimeStamped(BaseAggregate): - created_at = DateTime(default=datetime.utcnow) - updated_at = DateTime(default=datetime.utcnow) - - class Meta: - abstract=True - -In this example, the base Aggregate ``TimeStamped`` will not have an automatically generated ``id`` field: - -.. code-block:: python - - >>> TimeStamped.meta_.declared_fields - {'created_at': , - 'updated_at': } - -Abstract Aggregates cannot have an explicit identifier field either: - -.. code-block:: python - - @domain.aggregate - class User(BaseAggregate): - email = String(identifier=True) - name = String(max_length=55) - - class Meta: - abstract=True - -Trying to declare one will throw an :class:`~protean.exceptions.IncorrectUsageError` exception. - -Metadata --------- - -Aggregate metadata is available under the ``meta_`` attribute of an aggregate object in runtime, and is made up of two parts: - -Meta options -```````````` - -Options that control Aggregate behavior, such as its database provider, the name used to persist the aggregate entity, or if the Aggregate is abstract. These options can be overridden with an inner ``class Meta``, like so: - -.. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - - class Meta: - provider = 'nosql' - -The overridden attributes are reflected in the ``meta_`` attribute: - - >>> Person.meta_.provider - 'nosql' - -Available options are: - -.. _user-aggregate-meta-abstract: - -.. py:data:: abstract - - The flag used to mark an Aggregate as abstract. If abstract, the aggregate class cannot be instantiated and needs to be subclassed. Refer to the section on :ref:`entity-abstraction` for a deeper discussion. - - .. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - - class Meta: - abstract = True - - Trying to instantiate an abstract Aggregate will throw a ``NotSupportedError``: - - >>> p = Person(first_name='John', last_name='Doe') - NotSupportedError Traceback (most recent call last) - ... - NotSupportedError: Person class has been marked abstract and cannot be instantiated - -.. py:data:: provider - - The database that the aggregate is persisted in. - - Aggregates are connected to underlying data stores via providers. The definitions of these providers are supplied within the ``DATABASES`` key as part of the Domain's configuration during initialization. Protean identifies the correct data store, establishes the connection and takes the responsibility of persisting the data. - - Protean requires at least one provider, named ``default``, to be specified in the configuration. When no provider is explicitly specified, Aggregate objects are persisted into the ``default`` data store. - - Configuration: - - .. code-block:: python - - ... 
- DATABASES = { - 'default': { - 'PROVIDER': 'protean_sqlalchemy.provider.SAProvider' - }, - "nosql": { - "PROVIDER": "protean.adapters.repository.elasticsearch.ESProvider", - "DATABASE": Database.ELASTICSEARCH.value, - "DATABASE_URI": {"hosts": ["localhost"]}, - }, - } - ... - - You can then connect the provider explicitly to an Aggregate by its ``provider`` Meta option: - - .. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - - class Meta: - provider = 'nosql' - - Refer to :ref:`user-persistence` for an in-depth discussion about persisting to databases. - -.. py:data:: model - - Protean automatically constructs a representation of the aggregate that is compatible with the configured database. While the generated model suits most use cases, you can also explicitly construct a model and associate it with the aggregate. - - .. code-block:: python - - import sqlalchemy - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - - @domain.model(entity_cls=Person) - class PersonModel: - name = sqlalchemy.Column(sqlalchemyText) - - Note that custom models are associated with a specific database type. The model is used only when database of the right type is active. Refer to :ref:`aggregate-custom-models` for more information. - -.. py:data:: schema_name - - The name to store and retrieve the aggregate from the persistence store. By default, ``schema_name`` is the snake case version of the Aggregate's name. - - .. code-block:: python - - @domain.aggregate - class UserProfile: - name = String() - - ``schema_name`` is available under ``meta_``: - - >>> UserProfile.meta_.schema_name - 'user_profile' - -Reflection -`````````` - -Aggregates are decorated with additional attributes that you can use to examine the aggregate structure in runtime. The following meta attributes are available: - -.. _user-aggregate-meta-declared-fields: - -.. py:data:: declared_fields - - A map of fields explicitly declared in the Aggregate. - - >>> @domain.aggregate - ... class Person: - ... first_name = String(max_length=30) - ... last_name = String(max_length=30) - ... - >>> Person.meta_.declared_fields - {'first_name': , - 'last_name': , - 'id': } - -.. _user-aggregate-meta-id-field: - -.. py:data:: id_field - - The identifier field configured for the Entity or Aggregate. A field can be marked as an identifier by setting the ``identifier=True`` option. - - >>> @domain.aggregate - ... class Person: - ... email = String(identifier=True) - ... first_name = String(max_length=30) - ... last_name = String(max_length=30) - ... - >>> Person.meta_.id_field - - >>> Person.meta_.id_field.attribute_name - 'email' - - When not explicitly identified, an identifier field named ``id`` of type :ref:`field-type-auto` is added automatically to the Aggregate:: - - >>> @domain.aggregate - ... class Person: - ... first_name = String(max_length=30) - ... last_name = String(max_length=30) - ... - >>> Person.meta_.declared_fields - {'first_name': , - 'last_name': , - 'id': } - >>> Person.meta_.id_field - - -.. py:data:: attributes - - A map of all fields, including :ref:`user-aggregate-meta-value-object-fields` and :ref:`user-aggregate-meta-reference-fields` fields. These attribute names are used during persistence of Aggregates, unless overridden by :ref:`api-fields-referenced-as`. - - .. 
code-block:: python - - @domain.entity(aggregate_cls="Account") - class Profile: - email = String(required=True) - name = String(max_length=50) - password = String(max_length=50) - - @domain.value_object - class Balance: - currency = String(max_length=3) - amount = Float() - - @domain.aggregate - class Account: - account_type = String(max_length=25) - balance = ValueObject(Balance) - profile = Reference(Profile) - - All fields are available under ``meta_``: - - >>> Account.meta_.attributes - {'account_type': , - 'balance_currency': , - 'balance_amount': , - 'profile_id': , - 'id': } - -.. _user-aggregate-meta-value-object-fields: - -.. py:data:: value_object_fields - - A map of fields derived from value objects embedded within the Aggregate. - - .. code-block:: python - - @domain.value_object - class Balance: - currency = String(max_length=3) - amount = Float() - - @domain.aggregate - class Account: - account_type = String(max_length=25) - balance = ValueObject(Balance) - - The fields are now available as part of ``meta_`` attributes: - - >>> Account.meta_.value_object_fields - {'balance_currency': , - 'balance_amount': } - -.. _user-aggregate-meta-reference-fields: - -.. py:data:: reference_fields - - A map of reference fields (a.k.a Foreign keys, if you are familiar with the relational world) embedded within the Aggregate. - - .. code-block:: python - - @domain.aggregate - class Post: - content = Text(required=True) - author = Reference("Author") - - @domain.entity(aggregate_cls="Post") - class Author: - first_name = String(required=True, max_length=25) - last_name = String(max_length=25) - - An attribute named `author_id` (_) is automatically generated and attached to the Aggregate:: - - >>> Post.meta_.reference_fields - {'author_id': } diff --git a/docs-sphinx/user/entities-and-vos.rst b/docs-sphinx/user/entities-and-vos.rst deleted file mode 100644 index 3cc859cd..00000000 --- a/docs-sphinx/user/entities-and-vos.rst +++ /dev/null @@ -1,388 +0,0 @@ -Aggregate Elements -================== - -Aggregates, by definition, cluster multiple domain elements together to represent a concept. They are usually composed of two kinds of elements: those with unique identities (``Entities``) and those without (``Value Objects``). - -Entities --------- - -Entities represent unique objects in the domain model. They are very similar to Aggregates except that they don't manage other objects. In fact, Aggregates are actually entities that have taken on the additional responsibility of managing the lifecycle of one or more related entities. - -Entities are identified by their unique identities that remain the same throughout its life - they are not defined by their attributes or values. For example, a passenger in the airline domain is an Entity. The passenger's identity remains the same across multiple seat bookings, even if her profile information (name, address, etc.) changes over time. - -It is also important to note that an Entity in one domain may not be an Entity in another. For example, a seat is an Entity if airlines distinguish each seat uniquely on every flight. If passengers are not allotted specific seats, then a seat can be considered a ``ValueObject``, as one seat can be exchanged with another. We will explain Value Objects in detail further in this section. - -You can define and register an Entity by annotating it with the ``@domain.entity`` decorator: - -.. 
code-block:: python - - from protean.domain import Domain - from protean.core.field.basic import Date, String - - publishing = Domain(__name__) - - @publishing.aggregate - class Post: - name = String(max_length=50) - created_on = Date() - - @publishing.entity(aggregate_cls=Post) - class Comment: - content = String(max_length=500) - -An Entity's Aggregate can also be specified as an attribute of the ``Meta`` class: - -.. code-block:: python - - @publishing.entity - class Comment: - content = String(max_length=500) - - class Meta: - aggregate_cls = Post - -Properties -`````````` - -Entities share all traits of Aggregates like id-based equality, inheritance, and abstraction, except that they cannot enclose other entities. They usually map 1-1 with structures in the persistent store (tables or documents) and only enclose basic fields or Value Objects. - -.. // FIXME Unimplemented Feature - -Trying to specify other entity fields throws a ``IncorrectUsageError``. - -Relationships -------------- - -Protean provides multiple options with which Aggregates can weave object graphs with enclosed Entities. We will explore the different relationships between an Aggregate and its enclosed Entities with the example domain below. - -.. code-block:: python - - @publishing.aggregate - class Post: - title = String(max_length=50) - created_on = Date(default=datetime.utcnow) - - stats = HasOne('Statistic') - comments = HasMany('Comment') - - - @publishing.entity(aggregate_cls=Post) - class Statistic: - likes = Integer() - dislikes = Integer() - post = Reference(Post) - - - @publishing.entity(aggregate_cls=Post) - class Comment: - content = String(max_length=500) - post = Reference(Post) - added_at = DateTime() - -HasOne -`````` - -A `HasOne` field establishes a ``has-one`` relation with the remote entity. In the example above, ``Post`` has exactly one ``Statistic`` record associated with it. - -.. code-block:: python - - >>> post = Post(title='Foo') - >>> post.stats = Statistic(likes=10, dislikes=1) - >>> current_domain.repository_for(Post).add(post) - -HasMany -``````` - -A `HasMany` field establishes a ``one-to-many`` relation with the remote entity. In the example above, ``Post`` can be associated with one or more comments. - -Field values can be added with field-specific utility methods: - -.. code-block:: python - - >>> post = Post(title='Foo') - >>> comment1 = Comment(content='bar') - >>> comment2 = Comment(content='baz') - >>> post.add_comments([comment1, comment2]) - >>> current_domain.repository_for(Post).add(post) - - >>> post.remove_comments(comment2) - >>> current_domain.repository_for(Post).add(post) - -Reference -````````` - -A ``Reference`` field establishes the opposite relationship with the parent at the data level. Entities that are connected by HasMany and HasOne relationships can reference their owning object. - -.. code-block:: python - - >>> reloaded_post = current_domain.repository_for(Post).get(post) - >>> assert reloaded_post.comments[0].post == reloaded_post - True - -Value Objects -------------- - -A Value Object is a domain element that represents a distinct domain concept, with attributes, behavior and validations built into them. They tend to act primarily as data containers, usually enclosing attributes of primitive types. - -Consider the simple example of an Email Address. A User's `Email` can be treated as a simple "String." 
If we do so, validations that check for the value correctness (an email address) are either specified as part of the User lifecycle methods (in `save`, `before_save`, etc.) or as independent business logic present in the services layer. - -But an `Email` is more than just another string in the system (say like First Name or Last Name). It has well-defined, explicit rules associated with it, like: - -* The presence of an ``@`` symbol -* A string with acceptable characters (like ``.`` or ``_``) before the ``@`` symbol -* A valid domain URL right after the ``@`` symbol -* The domain URL to be among the list of acceptable domains, if defined -* A total length of less 255 characters -* and so on. - -So it makes better sense to make `Email` a Value Object, with a simple string representation to the outer world, but having a distinct `local_part` (the part of the email address before `@`) and `domain_part` (the domain part of the address). Any value assignment has to satisfy the domain rules listed above. - -Equality -```````` - -Two value objects are considered to be equal if their values are equal. - -.. code-block:: python - - @domain.value_object - class Balance: - currency = String(max_length=3, required=True) - amount = Float(required=True) - -.. code-block:: python - - >>> bal1 = Balance(currency='USD', amount=100.0) - >>> bal2 = Balance(currency='USD', amount=100.0) - >>> bal3 = Balance(currency='CAD', amount=100.0) - - >>> bal1 == bal2 - True - >>> bal1 == bal3 - False - -Identity -```````` - -Value Objects do not have unique identities. - -.. // FIXME Unimplemented Feature - -Unlike Aggregates and Entities, Value Objects do not have any inbuilt concept of unique identities. Trying to mark a Value Object field as ``unique = True`` or ``identifier = True`` will throw a :class:`~protean.exceptions.IncorrectUsageError` exception. - -.. code-block:: python - - >>> bal1.meta_.declared_fields - {'currency': , - 'amount': } - - >>> bal1.meta_.id_field - Traceback (most recent call last): - File "", line 1, in - bal1.meta_.id_field - AttributeError: 'ContainerMeta' object has no attribute 'id_field' - -Immutability -```````````` - -.. // FIXME Unimplemented Feature - -A Value Object cannot be altered once initialized. Trying to do so will throw a ``TypeError``. - -.. code-block:: python - - >>> bal1 = Balance(currency='USD', amount=100.0) - - >>> bal1.currency = 'CAD' - Traceback (most recent call last): - File "", line 1, in - bal1.currency = 'CAD' - TypeError: value object is immutable - -Embedding Value Objects ------------------------ - -Value Objects can be embedded into Aggregates and Entities as part of their attributes. - -.. code-block:: python - - @domain.value_object - class Money: - currency = String(max_length=3) - amount = Float() - - @domain.aggregate - class Account: - name = String(max_length=50) - balance = ValueObject(Money) - -.. code-block:: python - - >>> Account.meta_.attributes - {'name': , - 'balance_currency': , - 'balance_amount': , - 'id': } - -As visible in the output above, the names of Value Object attributes are generated dynamically. The names are a combination of the attribute name in the enclosed container and the names defined in the Value Object, separated by underscores. So `currency` and `amount` are available as `balance_currency` and `balance_amount` in the ``Account`` Aggregate. - -You can override these automatically generated names with the `referenced_as` option in the Value Object: - -.. 
code-block:: python - - @domain.value_object - class Money: - currency = String(max_length=3) - amount = Float(referenced_as="amt") - -The supplied attribute name is used as-is in enclosed containers: - -.. code-block:: python - - >>> Account.meta_.attributes - {'name': , - 'balance_currency': , - 'amt': , - 'id': } - -Examples --------- - -Email -````` - -.. code-block:: python - - @domain.value_object - class Email: - """An email address value object, with two identified parts: - * local_part - * domain_part - """ - - # This is the external facing data attribute - address = String(max_length=254, required=True) - - def __init__(self, *template, local_part=None, domain_part=None, **kwargs): - """ `local_part` and `domain_part` are internal attributes that capture - and preserve the validity of an Email Address - """ - - super(Email, self).__init__(*template, **kwargs) - - self.local_part = local_part - self.domain_part = domain_part - - if self.local_part and self.domain_part: - self.address = '@'.join([self.local_part, self.domain_part]) - else: - raise ValidationError("Email address is invalid") - - @classmethod - def from_address(cls, address): - """ Construct an Email VO from an email address. - - email = Email.from_address('john.doe@gmail.com') - - """ - if not cls.validate(address): - raise ValueError('Email address is invalid') - - local_part, _, domain_part = address.partition('@') - - return cls(local_part=local_part, domain_part=domain_part) - - @classmethod - def from_parts(cls, local_part, domain_part): - """ Construct an Email VO from parts of an email address. - - email = Email.from_parths(local_part='john.doe', domain_part='@gmail.com') - - """ - return cls(local_part=local_part, domain_part=domain_part) - - @classmethod - def validate(cls, address): - """ Business rules of Email address """ - if type(address) is not str: - return False - if '@' not in address: - return False - if len(address) > 255: - return False - - return True - -Address -``````` - -.. code-block:: python - - @domain.value_object - class Address: - address1 = String(max_length=255, required=True) - address2 = String(max_length=255) - address3 = String(max_length=255) - city = String(max_length=25, required=True) - state = String(max_length=25, required=True) - country = String(max_length=2, required=True, choices=CountryEnum) - zip = String(max_length=6, required=True) - - def validate_with_canada_post(self): - return CanadaPostService.verify(self.to_dict()) - -Account Balance -``````````````` - -An Account's Balance consists of two parts: a Currency (string) and an Amount (float). It may have restrictions like positive balance and supported currencies. - -.. code-block:: python - - class Currency(Enum): - """ Set of choices for the status""" - USD = 'USD' - INR = 'INR' - CAD = 'CAD' - - - @domain.value_object - class Balance: - """A composite amount object, containing two parts: - * currency code - a three letter unique currency code - * amount - a float value - """ - - currency = String(max_length=3, required=True, choices=Currency) - amount = Float(required=True, min_value=0.0) - -Temperature -``````````` - -A valid Temperature contains two parts, a scale (Celsius or Fahrenheit) and a temperature integer value. The application may want to place restrictions on a range of acceptable values, and specify that only positive temperature values are allowed. - -.. // FIXME Unimplemented Feature - choices can be a `list` - -.. 
code-block:: python - - @domain.value_object - class Temperature: - scale = String(max_length=1, required=True, choices=['C', 'F']) - degrees = Integer(required=True, min_value=-70, max_value=500) - - -Account -``````` - -The ``Account`` entity below encloses an ``Email`` Value Object and is part of a ``Profile`` Aggregate. - -.. code-block:: python - - @domain.entity(aggregate_cls='Profile') - class Account: - email = ValueObject(Email, required=True) - password = String() - - @domain.aggregate - class Profile: - first_name = String(max_length=50) - last_name = String(max_length=50) - account = HasOne(Account) diff --git a/docs-sphinx/user/event-sourcing.rst b/docs-sphinx/user/event-sourcing.rst deleted file mode 100644 index 49a7b3ca..00000000 --- a/docs-sphinx/user/event-sourcing.rst +++ /dev/null @@ -1,106 +0,0 @@ -Event Sourcing -============== - -You can choose to store all application data in the form of events. Event sourcing persists the state of a business entity such an Order or a Customer as a sequence of state-changing events. Whenever the state of a business entity changes, a new event is appended to the list of events. - -You can use Event-Sourced Aggregates to represent such business entities that are backed by an event store. - -Event-Sourced Aggregates ------------------------- - -Event-Sourced Aggregates are defined with the :meth:`~protean.Domain.event_sourced_aggregate` decorator: - -.. code-block:: python - - from protean.domain import Domain - from protean.fields import Date, String - - identity = Domain(__name__) - - @identity.event_sourced_aggregate - class User: - first_name = String(max_length=50) - email = String(required=True) - joined_on = Date() - -Similar to Aggregates, an Identifier field named `id` is made available in the aggregate if no identifier fields are explicitly provided. - -Storing Events --------------- - -Event-Sourced Aggregates raise events as part of their processing, with the `raise_` method: - -.. code-block:: python - :emphasize-lines: 12 - - from my_app.domain import identity - - @identity.event_sourced_aggregate - class User: - first_name = String(max_length=50) - email = String(required=True) - joined_on = Date() - - @classmethod - def register(cls, email, password): - user = cls(email=email, password=password) - user.raise_(Registered(email=email, password=password)) - - return user - -Protean provides an In-Memory Event Store for testing purposes, but supports |MessageDB| for development and production environments. - -.. // FIXME Add documentation on MessageDB specific characteristics like format of streams, snapshots, etc. - -You can configure MessageDB as the Event Store in config: - -.. code-block:: python - - EVENT_STORE = { - "PROVIDER": "protean.adapters.event_store.message_db.MessageDBStore", - "DATABASE_URI": "postgresql://message_store@localhost:5433/message_store", - } - -You can retrieve the repository for an Event-Sourced Aggregate with `repository_for`: - -.. code-block:: python - - >>> from protean.globals import current_domain - - >>> current_domain.repository_for(User) - -But unlike Aggregates, there is no way to query records because all data is stored purely in the form of events. The only methods supported by an Event-Sourced Repository are: - -#. ``add`` - - `add` persists new events to the event store, typically on committing the Unit of Work. - -#. ``get`` - - `get` rehydrates the aggregate by its ID from the event store and returns the latest snapshot of the aggregate object. 
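A minimal usage sketch tying these together, assuming the ``User`` event-sourced aggregate and its ``register`` factory from the examples above (the identifier value is whatever Protean generated for the aggregate):

.. code-block:: python

    from protean.globals import current_domain

    # Raise the Registered event and persist it to the event store
    user = User.register(email="john.doe@example.com", password="secret")
    current_domain.repository_for(User).add(user)

    # Rehydrate the aggregate from its event stream (or latest snapshot)
    rehydrated = current_domain.repository_for(User).get(user.id)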
- -Snapshots ---------- - -Protean stores regular snapshots of event-sourced aggregates to optimize re-hydration performance. These snapshots are automatically used when repositories retrieve aggregates from the event store. - -By default, Protean is configured to store a snapshot after every 10 events. You can customize this interval with the ``SNAPSHOT_THRESHOLD`` config flag: - -.. code-block:: python - - # config.py - class Config: - SNAPSHOT_THRESHOLD = 25 - -Optimistic Concurrency ----------------------- - -All Event-Sourced Aggregates hold a ``_version`` attribute that is used to implement optimistic concurrency controls. The version number is incremented on every new event in the aggregate stream. An aggregate loaded from the event store will hold the latest version number in its ``_version`` attribute. - -Each event carries an ``expected_version`` that should match the ``_version`` of the loaded aggregate. If there is a mismatch in the versions, the event is discarded with a ``ExpectedVersionError``. - -.. // FIXME Add image depicting Optimistic concurrency at work. - -.. |MessageDB| raw:: html - - MessageDB diff --git a/docs-sphinx/user/eventing.rst b/docs-sphinx/user/eventing.rst deleted file mode 100644 index 5bfdd6c3..00000000 --- a/docs-sphinx/user/eventing.rst +++ /dev/null @@ -1,187 +0,0 @@ -Processing Events -================= - -Most applications have a definite state - they remember past user input and interactions. It is advantageous to model these past changes as a series of discrete events. Domain events happen to be those activities that domain experts care about and represent what happened as-is. - -Domain events are the primary building blocks of a domain in Protean. They perform two major functions: - -1. They **facilitate eventual consistency** in the same bounded context or across. - This makes it possible to define invariant rules across Aggregates. Every change to the system touches one and only one Aggregate, and other state changes are performed in separate transactions. - - Such a design eliminates the need for two-phase commits (global transactions) across bounded contexts, optimizing performance at the level of each transaction. - -2. Events also **keep boundaries clear and distinct** among bounded contexts. - Each domain is modeled in the architecture pattern that is appropriate for its use case. Events propagate information across bounded contexts, thus helping to sync changes throughout the application domain. - -Defining Domain Events ----------------------- - -A Domain event is defined with the :meth:`~protean.Domain.event` decorator: - -.. code-block:: python - - @domain.event(aggregate_cls='Role') - class UserRegistered: - name = String(max_length=15, required=True) - email = String(required=True) - timezone = String() - -Often, a Domain event will contain values and identifiers to answer questions about the activity that initiated the event. These values, such as who, what, when, why, where, and how much, represent the state when the event happened. - -.. // FIXME Unimplemented Feature - -Since Events are essentially Data Transfer Objects (DTOs), they can only hold simple field structures. You cannot define them to have associations or value objects. - -Ideally, the Event only contains values that are directly relevant to that Event. A receiver that needs more information should listen to pertinent other Events and keep its own state to make decisions later. 
The receiver shouldn't query the current state of the sender, as the state of the sender might already be different from the state it had when it emitted the Event. - -Because we observe Domain Events from the outside after they have happened, we should name them in the past tense. So "StockDepleted "is a better choice than the imperative "DepleteStock "as an event name. - -Raising Events --------------- - -.. image:: /images/raising-events.jpg - :alt: Raising Events - :scale: 100% - -Domain events are best bubbled up from within Aggregates responding to the activity. - -In the example below, the ``Role`` aggregate raises a ``RoleAdded`` event when a new role is added to the system. - -.. code-block:: python - - ... - - @domain.aggregate - class Role: - name = String(max_length=15, required=True) - created_on = DateTime(default=datetime.today()) - - @classmethod - def add_new_role(cls, params): - """Factory method to add a new Role to the system""" - new_role = Role(name=params['name']) - - current_domain.publish(RoleAdded(role_name=new_role.name, added_on=new_role.created_on)) - - return new_role - -.. // FIXME Unimplemented Feature : Discussion #354 - -Adding a new role generates a ``RoleAdded`` event:: - - >>> role = Role.add_new_role({'name': 'ADMIN'}) - >>> role.events - [RoleAdded] - -UnitOfWork Schematics -``````````````````````` - -Events raised in the Domain Model should be exposed only after the changes are recorded. This way, if the changes are not persisted for some reason, like a technical fault in the database infrastructure, events are not accidentally published to the external world. - -.. // FIXME Unimplemented Feature : Discussion ??? - -In Protean, domain changes being performed in the Application layer, within *Application Services*, *Command Handlers*, and *Subscribers* for example, are always bound within a :class:`UnitOfWork`. Events are exposed to the external world only after all changes have been committed to the persistent store atomically. - -This is still a two-phase commit and is prone to errors. For example, the database transaction may be committed, but the system may fail to dispatch the events to the message broker because of technical issues. Protean supports advanced strategies that help maintain data and event sanctity to avoid these issues, as outlined in the :ref:`event-processing-strategies` section. - -Consuming Events ----------------- - -.. image:: /images/consuming-events.jpg - :alt: Consuming Events - :scale: 100% - -Subscribers live on the other side of event publishing. They are domain elements that subscribe to specific domain events and are notified by the domain on event bubble-up. - -Subscribers can: - -#. Help propagate a change to the rest of the system - across multiple aggregates - and eventually, make the state consistent. -#. Run secondary stuff, like sending emails, generating query models, populating reports, or updating cache, in the background, making the transaction itself performance-optimized. - -A Subscriber can be defined and registered with the help of ``@domain.subscriber`` decorator: - -.. 
code-block:: python - - @domain.subscriber(event='OrderCancelled') - class UpdateInventory: - """Update Stock Inventory and replenish order items""" - - def __call__(self, event: Dict) -> None: - stock_repo = current_domain.repository_for(Stock) - for item in event['order_items']: - stock = stock_repo.get(item['sku']) - stock.add_quantity(item['qty']) - - stock_repo.add(stock) - -Just like :ref:`user-application-services` and :ref:`command-handlers`, Subscribers should adhere to the rule of thumb of not modifying more than one aggregate instance in a transaction. - -.. _event-processing-strategies: - -Processing Strategies ---------------------- - -Protean provides fine-grained control on how exactly you want domain events to be processed. These strategies, listed in the order of their complexity below, translate to increased robustness on the event processing side. These performance optimizations and processing stability come in handy at any scale but are imperative at a larger scale. - -Depending on your application's lifecycle and your preferences, one or more of these strategies may make sense. But you can choose to start with the most robust option, ``DB_SUPPORTED_WITH_JOBS``, with minimal performance penalties. - -Event processing strategy for your domain is set in the config attribute :attr:`~protean.Config.EVENT_STRATEGY`. - -#. .. py:data:: INLINE - - This is the default and most basic option. In this mode, Protean consumes and processes events inline as they are generated. Events are not persisted and are processed in an in-memory queue. - - There is no persistence store involved in this mode, and events are not stored. If events are lost in transit for some reason, like technical faults, they are lost forever. - - This mode is best suited for testing purposes. Events raised in tests are processed immediately so tests can include assertions for side-effects of events. - - If you are processing events from within a single domain (if your application is a monolith, for example), you can simply use the built-in :class:`InlineBroker` as the message broker. If you want to exchange messages with other domains, you can use one of the other message brokers, like :class:`RedisBroker`. - - ``config.py``: - - .. code-block:: python - - ... - EVENT_STRATEGY = "INLINE" - ... - -#. .. py:data:: DB_SUPPORTED - - The ``DB_SUPPORTED`` strategy persists Events into the same persistence store in the same transaction along with the actual change. This guarantees data consistency and ensures events are never published without system changes. - - This mode also performs better than ``INLINE`` mode because events are dispatched and processed in background threads. One background process monitors the ``EventLog`` table and dispatches the latest events to the message broker. Another gathers new events from the message broker and processes them in a thread pool. - - Depending on the persistence store in use, you may need to manually run migration scripts to create the database structure. Consult :class:`EventLog` for available options. - - Note that this mode needs the :class:`Server` to be started as a separate process. If your application already runs within a server (if you have an API gateway, for example), you can run the server as part of the same process. Check :doc:`user/server` for a detailed discussion. - - ``config.py``: - - .. code-block:: python - - ... - EVENT_STRATEGY = "DB_SUPPORTED" - ... - -#. .. py:data:: DB_SUPPORTED_WITH_JOBS - - This is the most robust mode of all. 
In this mode, Protean routes all events through the data store and tracks each subscriber's processing as separate records. This allows you to monitor errors at the level of each subscriber process and run automatic recovery tasks, like retrying jobs, generating alerts, and running failed processes manually. - - This mode needs the :class:`Job` data structure to be created along with :class:`EventLog`. - - ``config.py``: - - .. code-block:: python - - ... - EVENT_STRATEGY = "DB_SUPPORTED_WITH_JOBS" - ... - -Best Practices --------------- - -* Your Event's name should preferably be in the past sense. Ex. `RoleAdded`, `UserProvisioned`, etc. They are representing facts that have already happened outside the system. -* Event objects are immutable in nature, so ensure you are passing all event data while creating a new event object. -* Events are simple data containers, so they should preferably have no methods. In the rare case that an event contains methods, they should be side-effect-free and return new event instances. -* Subscribers should never be constructed or invoked directly. The purpose of the message transport layer is to publish an event for system-wide consumption. So manually initializing or calling a subscriber method defeats the purpose. -* Events should enclose all the necessary information from the originating aggregate, including its unique identity. Typically, a subscriber should not have to contact the originating aggregate bounded context again for additional information because the sender's state could have changed by that time. diff --git a/docs-sphinx/user/fields.rst b/docs-sphinx/user/fields.rst deleted file mode 100644 index 061a28fc..00000000 --- a/docs-sphinx/user/fields.rst +++ /dev/null @@ -1,463 +0,0 @@ -Data Fields -=========== - -This document contains the field options and field types available in Protean, and their built-in capabilities. - -Field options -------------- - -required -```````` - -If ``True``, the field is not allowed to be blank. Default is ``False``. - -.. code-block:: python - - @domain.aggregate - class Person: - name = String(required=True) - -Leaving the field blank or not specifying a value will raise a ``ValidationError``:: - - >>> p1 = Person() - defaultdict(, {'name': ['is required']}) - ... - ValidationError: {'name': ['is required']} - -identifier -`````````` - -If ``True``, the field is the primary key for the entity. - -.. code-block:: python - - @domain.aggregate - class Person: - email = String(identifier=True) - name = String(required=True) - -The field is validated to be unique and non-blank:: - - >>> p = Person(email='john.doe@example.com', name='John Doe') - >>> p.meta_.declared_fields - {'email': , - 'name': } - >>> p = Person(name='John Doe') - ValidationError Traceback (most recent call last) - ... - ValidationError: {'email': ['is required']} - -If you don't specify ``identifier=True`` for any field in your Entity, Protean will automatically add a field called ``id`` to hold the primary key, so you don't need to set ``identifier=True`` on any of your fields unless you want to override the default primary-key behavior. - -Alternatively, you can use the ::ref:`field-type-identifier` field for primary key fields. The type of the field can be specified per domain in config with :ref:`user-configuration-identity-type`. - -default -``````` - -The default value for the field. This can be a value or a callable object. If callable, it will be called every time a new object is created. - -.. 
code-block:: python - - @domain.aggregate - class Adult: - name = String(max_length=255) - age = Integer(default=21) - -The default can't be a mutable object (list, set, dict, entity instance, etc.), as a reference to the same object would be used as the default value in all new entity instances. Instead, wrap the desired default in a callable. - -For example, to specify a default ``list`` for ``List`` field, use a function: - -.. code-block:: python - - def standard_topics(): - return ["Music", "Cinema", "Politics"] - - @domain.aggregate - class Adult: - name = String(max_length=255) - age = Integer(default=21) - topics = List(default=standard_topics) - -Initializing an ``Adult`` aggregate would populate the defaults when values are not specified explicitly:: - - >>> adult1 = Adult(name="John Doe") - >>> adult1.to_dict() - {'name': 'John Doe', 'age': 21, 'topics': ['Music', 'Cinema', 'Politics'], 'id': '8c0f63c0-f4c2-4f73-baad-889f63565986'} - -You can even use a lambda expression to specify an anonymous function: - -.. code-block:: python - - import random - - @domain.aggregate - class Dice: - throw = Integer(default=lambda: random.randrange(1, 6)) - -unique -`````` - -If ``True``, this field must be unique among all entities. - -.. code-block:: python - - @domain.aggregate - class Person: - name = String(max_length=255) - email = String(unique=True) - -This is enforced by entity validation. If you try to save an entity with a duplicate value in a ``unique`` field, a :ref:`validation-error` will be raised:: - - >>> p1 = Person(name='John Doe', email='john.doe@example.com') - >>> domain.repository_for(Person).add(p1) - >>> p2 = Person(name= 'Jane Doe', email='john.doe@example.com') - >>> domain.repository_for(Person).add(p2) - ValidationError Traceback (most recent call last) - ... - ValidationError: {'email': ["Person with email 'john.doe@example.com' is already present."]} - -choices -``````` - -When supplied, the value of the field is validated to be one among the specified options. - -.. code-block:: python - - class BuildingStatus(Enum): - WIP = "WIP" - DONE = "DONE" - - @domain.aggregate - class Building: - name = String(max_length=50) - floors = Integer() - status = String(choices=BuildingStatus) - -The value is generally supplied as a string during entity initialization:: - - >>> building = Building(name="Atlantis", floors=3, status="WIP") - >>> building.to_dict() - {'name': 'Atlantis', - 'floors': 3, - 'status': 'WIP', - 'id': '66562983-bd3a-4ac0-864c-2034cb6bea0d'} - -The choices are enforced during entity validation:: - - >>> building = Building(name="Atlantis", floors=3, status="COMPLETED") - ValidationError Traceback (most recent call last) - ... - ValidationError: {'status': ["Value `'COMPLETED'` is not a valid choice. Must be one of ['WIP', 'DONE']"]} - -.. _api-fields-referenced-as: - -referenced_as -````````````` - -The name used to store and retrieve the attribute's value. A field's ``referenced_as`` name is used by Protean's persistence mechanism while storing and retrieving the field. - -.. 
code-block:: python - - @domain.aggregate - class Person: - email = String(unique=True) - name = String(referenced_as='fullname', required=True) - -``meta_.declared_fields`` will preserve the original field name, while ``meta_.attributes`` will reflect the new name:: - - >>> Person.meta_.declared_fields - {'email': , - 'fullname': , - 'id': } - >>> Person.meta_.attributes - {'email': , - 'fullname': , - 'id': } - -TO BE DOCUMENTED - -validators -`````````` - -A list of validators to run for this field. See :ref:`Validators API Documentation ` for more information. - -error_messages -`````````````` - -If supplied, the default messages that the field will raise will be overridden. Error message keys include **required**, **invalid**, **unique**, and **invalid_choice**. Additional error message keys are specified for each field in the :ref:`field-types` section below. - -.. code-block:: python - - @domain.aggregate - class Child: - name = String(required=True, error_messages={'required': "Please specify child's name"}) - age = Integer(required=True) - -The custom error message can be observed in the ``ValidationError`` exception:: - - >>> Child() - ValidationError Traceback (most recent call last) - ... - ValidationError: {'name': ["Please specify child's name"], 'age': ['is required']} - -The error message can be formatted with additional keyword arguments: - -.. //FIXME Pending Documentation - -.. _field-types: - -Basic Fields ------------- - -.. _field-type-string: - -String -`````` - -A string field, for small- to large-sized strings. For large amounts of text, use :ref:`field-type-text`. - -Optional arguments: - -- ``max_length``: The maximum length (in characters) of the field, enforced during validation using :ref:`MaxLengthValidator `. Defaults to 255. -- ``min_length``: The minimum length (in characters) of the field, enforced during validation using :ref:`MinLengthValidator `. -- ``sanitize``: Optionally turn off HTML sanitization. Default is True. - -.. _field-type-text: - -Text -```` - -A large text field, to hold large amounts of text. Text fields do not have size constraints. - -Optional arguments: - -- ``sanitize``: Optionally turn off HTML sanitization. Default is True. - -.. _field-type-integer: - -Integer -``````` - -An integer. It uses :ref:`MinValueValidator ` and :ref:`MaxValueValidator ` to validate the input based on the values that the default database supports. - -``Integer`` has two optional arguments: - -- ``max_value``: The maximum numeric value of the field, enforced during validation using :ref:`MaxValueValidator `. -- ``min_value``: The minimum numeric value of the field, enforced during validation using :ref:`MinValueValidator `. - -Float -````` - -A floating-point number represented in Python by a float instance. - -``Float`` has two optional arguments: - -- ``max_value``: The maximum numeric value of the field, enforced during validation using :ref:`MaxValueValidator `. -- ``min_value``: The minimum numeric value of the field, enforced during validation using :ref:`MinValueValidator `. - -Boolean -``````` - -A ``True``/``False`` field. - -.. code-block:: python - - @domain.aggregate - class Person: - name = String(required=True) - adult = Boolean() - -The default value is ``None`` when ``default`` option isn’t defined:: - - >>> person = Person(name='John Doe') - >>> p4.to_dict() - {'name': 'John Doe', - 'adult': None, - 'id': 'e30e97fb-540b-43f0-8fc9-937baf413080'} - -.. _field-type-auto: - -Auto -```` - -Automatically-generated unique identifiers. 
By default, all entities and aggregates hold an ``Auto`` field named ``id`` that acts as their unique identifier. You cannot supply values explicitly to ``Auto`` fields - they are self-generated. - -.. code-block:: python - - @domain.aggregate - class Person: - first_name = String(max_length=30) - last_name = String(max_length=30) - -The identifier field is available as among ``declared_fields`` and is also accessible via the special ``id_field`` meta attribute:: - - >>> Person.meta_.declared_fields - {'first_name': , - 'last_name': , - 'id': } - >>> Person.meta_.id_field - - -An ``Auto`` field is unique by default:: - - >>> vars(Person.meta_.id_field) - ... - {'field_name': 'id', - 'attribute_name': 'id', - 'identifier': True, - 'default': None, - 'unique': True, - 'required': False, - ... - -At the same time, ``Auto`` fields cannot be marked as ``required`` because their values cannot be specified explicitly. - -.. _field-type-identifier: - -Identifier -`````````` - -.. //FIXME Pending Documentation - -Date -```` - -A date, represented in Python by a ``datetime.date`` instance. - -.. code-block:: python - - @domain.aggregate - class Person: - name = String(required=True) - born_on = Date(required=True) - -The date can be specified as a ``datetime.date`` object:: - - >>> p = Person(name="John Doe", born_on=datetime(1962, 3, 16).date()) - >>> p.to_dict() - {'name': 'John Doe', - 'born_on': datetime.date(1962, 3, 16), - 'id': '0f9d4f86-a47c-48ec-bb14-8b8bb8a65ae3'} - -Or as a string, which will be parsed by ``dateutil.parse``:: - - >>> p = Person(name="John Doe", born_on="2018-03-16") - >>> p.to_dict() - {'name': 'John Doe', - 'born_on': datetime.date(1962, 3, 16), - 'id': '0f9d4f86-a47c-48ec-bb14-8b8bb8a65ae3'} - -DateTime -```````` - -A date and time, represented in Python by a ``datetime.datetime`` instance. - -.. code-block:: python - - @domain.aggregate - class User: - email = String(required=True) - created_at = DateTime(required=True) - -The timestamp can be specified as a ``datetime.datetime`` object:: - - >>> u = User(email="john.doe@example.com", created_at=datetime.utcnow()) - >>> u.to_dict() - {'email': 'john.doe@example.com', - 'created_at': datetime.datetime(2021, 6, 25, 22, 55, 19, 28744), - 'id': '448f885e-be8f-4968-bb47-c637eabc21f8'} - -Or as a string, which will be parsed by ``dateutil.parse``:: - - >>> u = User(email="john.doe@example.com", created_at="2018-03-16 10:23:32") - >>> u.to_dict() - {'email': 'john.doe@example.com', - 'created_at': datetime.datetime(2018, 3, 16, 10, 23, 32), - 'id': '1dcb17e1-64e9-43ef-b9bd-802b8a004765'} - -Container Fields ----------------- - -List -```` - -A collection field that accepts values of a specified basic field type. - -.. code-block:: python - - @domain.aggregate - class User: - email = String(max_length=255, required=True, unique=True) - roles = List() # Defaulted to hold String Content Type - -``roles`` now accepts a list of strings: - - >>> user = User(email='john.doe@example.com', roles=['ADMIN', 'EDITOR']) - >>> user.to_dict() - {'email': 'john.doe@example.com', - 'roles': ['ADMIN', 'EDITOR'], - 'id': 'ef2b222b-de5c-4968-8b1c-7e3cdb4a3c2c'} - -The supplied value needs to be a Python ``list``. Specifying values of a different basic type or a mixture of types throws a ``ValidationError``:: - - >>> user = User(email='john.doe@example.com', roles=[2, 1]) - ValidationError Traceback (most recent call last) - ... 
- ValidationError: {'roles': ['Invalid value [2, 1]']} - -``List`` has two optional arguments: - -- ``content_type``: The type of Fields enclosed in the list. - - Accepted Field Types are: - - - ``Boolean`` - - ``Date`` - - ``DateTime`` - - ``Float`` - - ``Identifier`` - - ``Integer`` - - ``String`` - - ``Text`` - - Default ``content_type`` is ``String``. - -- ``pickled``: Flag to treat the field as a Python object. Defaults to ``False``. Some database implementations (like Postgresql) can store lists by default. You can force it to store the pickled value as a Python object by specifying ``pickled=True``. Databases that don't support lists simply store the field as a python object, serialized using pickle. - -Dict -```` - -A map that closely resembles the Python Dictionary in its utility. - -.. code-block:: python - - @domain.aggregate - class Event: - name = String(max_length=255) - created_at = DateTime(default=datetime.utcnow) - payload = Dict() - -A regular dictionary can be supplied as value to ``payload``:: - - >>> event=Event(name='UserRegistered', payload={'name': 'John Doe', 'email': 'john.doe@example.com'}) - >>> event.to_dict() - {'name': 'UserRegistered', - 'created_at': datetime.datetime(2021, 6, 25, 22, 37, 24, 680524), - 'payload': {'name': 'John Doe', 'email': 'john.doe@example.com'}, - 'id': 'ab803d41-b8b0-48e6-a930-f0f265f62d9e'} - -``Dict`` accepts an optional argument: - -- ``pickled``: Flag to treat the field as a Python object. Defaults to ``False``. Some database implementations (like Postgresql) can store dicts as JSON by default. You can force it to store the pickled value as a Python object by specifying ``pickled=True``. Databases that don't support lists simply store the field as a python object, serialized using pickle. - -Method -`````` - -Nested -`````` - -Associations ------------- - -Embedded Fields ---------------- diff --git a/docs-sphinx/user/foreword.rst b/docs-sphinx/user/foreword.rst deleted file mode 100644 index 81efa282..00000000 --- a/docs-sphinx/user/foreword.rst +++ /dev/null @@ -1,40 +0,0 @@ -Foreword -======== - -Read this before you get started with Protean. This hopefully answers some questions about the purpose and goals of the project and when you should or should not be using it. - -Mirror the domain in code -------------------------- - -Protean allows you to express your domain (a.k.a, business requirements) clearly and concisely, without worrying about underlying technology or infrastructure. - -By separating domain logic from infrastructure, Protean allows you to: -- Reflect the business as accurately as possible in code without translation -- Delay making technology choices until the choices are obvious -- Test business logic thoroughly and have 100% coverage - -The underlying infrastructure is abstracted away, and you specify the technology choice through config attributes. Even with this flexibility, Protean remains pragmatic and allows you to override its implementation. For instance, you can use exclusive technology features (like non-SQL92-compliant database queries or non-standard messaging interfaces) for performance or aesthetic reasons. - -.. //FIXME Include reference to Escape hatches - -Remain technology agnostic --------------------------- - -Protean allows you to pick and choose technology components through configuration without affecting core domain logic. These components are made available as adapters that conform in structure and behavior to a published port interface. 
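As a rough sketch of what this looks like in practice, the same domain code can be pointed at an in-memory store for tests and SQLite elsewhere purely through configuration. The ``domain`` object, provider strings, and database URI below are illustrative and follow the ``DATABASES`` structure used in the quickstart:

.. code-block:: python

    import os

    # Illustrative only: select infrastructure via configuration,
    # without touching any domain logic
    if os.environ.get("PROTEAN_ENV") == "test":
        domain.config["DATABASES"]["default"] = {
            "PROVIDER": "protean.adapters.repository.MemoryProvider",
        }
    else:
        domain.config["DATABASES"]["default"] = {
            "PROVIDER": "protean.adapters.repository.sqlalchemy.SAProvider",
            "DATABASE": "SQLITE",
            "DATABASE_URI": "sqlite:///myapp.db",
        }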
- -Infrastructure components like databases, API frameworks, message brokers, and cache are instantiated outside the application and injected into the framework at runtime. This helps choose different technologies in diverse circumstances for the same code base - tests, for example, can be run on a lightweight database. - -Protean comes pre-packaged with adapters for many technology choices, but it is relatively straightforward to roll out your own if need be. - -Choose the right design pattern -------------------------------- - -Protean adopts |ddd| as its primary approach to building large-scale applications but also allows other architecture patterns, like CRUD, CQRS, and EventSourcing, to be used in combination. You are also free to choose the extent to which you follow each architecture's principles. - -A Protean application is typically made up of one or more microservices that communicate with each other through Domain Events but are in total control of their own architecture. They can choose from a variety of patterns like DDD, CQRS, ES, or CRUD. The decision of which pattern is most suited for the problem at hand is left to each microservice. - -In reality, Protean applications tend to be a combination of one or more of these patterns. It is indeed an anti-pattern to have the entire application built on a single design pattern. - -.. |ddd| raw:: html - - Domain-Driven Design diff --git a/docs-sphinx/user/installation.rst b/docs-sphinx/user/installation.rst deleted file mode 100644 index f41bfaa9..00000000 --- a/docs-sphinx/user/installation.rst +++ /dev/null @@ -1,113 +0,0 @@ -Installation -============ - -Install Python --------------- - -.. note:: Protean supports Python 3.7 and newer, but it is recommended that that you use the latest version of Python. - -Virtual environments allow you to install multiple Python versions side-by-side, without interfering with system-default Python installations. They also help you to work with different groups of Python libraries, one for each project, thereby preventing packages installed in one project from affecting other projects. - -There are many version managers that help you create virtual environments, like |pyenv| and |pipenv|, but we will quickly walk through the steps to create a virtual environment with one bundled with Python, :mod:`venv`. - -.. |pyenv| raw:: html - - pyenv - -.. |pipenv| raw:: html - - pipenv - - -.. _install-create-env: - -Create an environment -~~~~~~~~~~~~~~~~~~~~~ - -Create a project folder and a :file:`venv` folder within: - -.. tabs:: - - .. group-tab:: macOS/Linux - - .. code-block:: text - - $ mkdir myproject - $ cd myproject - $ python3 -m venv venv - - .. group-tab:: Windows - - .. code-block:: text - - > mkdir myproject - > cd myproject - > py -3 -m venv venv - - -.. _install-activate-env: - -Activate the environment -~~~~~~~~~~~~~~~~~~~~~~~~ - -Before you work on your project, activate the corresponding environment: - -.. tabs:: - - .. group-tab:: macOS/Linux - - .. code-block:: text - - $ . venv/bin/activate - - .. group-tab:: Windows - - .. code-block:: text - - > venv\Scripts\activate - -Your shell prompt will change to show the name of the activated -environment. - -You can verify the Pyton version by typing ``python`` from your shell; -you should see something like:: - - Python 3.8.10 (default, Jun 21 2021, 15:30:31) - [Clang 12.0.5 (clang-1205.0.22.9)] on darwin - Type "help", "copyright", "credits" or "license" for more information. 
- >>> - - -Install Protean ---------------- - -Within the activated environment, install Protean with the following command: - -.. code-block:: shell - - $ pip install protean - - -Verifying ---------- - -Use the ``protean`` command-line utility to verify the installation: - -.. code-block:: shell - - $ python -m protean --version - 0.12.1 - -To verify that Protean can be seen by your current installation of Python, -try importing Protean from a ``python`` shell: - -.. code-block:: shell - - $ python3 - >>> import protean - >>> protean.get_version() - 0.12.1 - -------------------- - -That's it! You can now move onto the :doc:`quickstart`. diff --git a/docs-sphinx/user/persistence.rst b/docs-sphinx/user/persistence.rst deleted file mode 100644 index b0b0d683..00000000 --- a/docs-sphinx/user/persistence.rst +++ /dev/null @@ -1,187 +0,0 @@ -Persisting Data -=============== - -To keep the application technology agnostic, persistence in Protean is handled with the help of repositories that abstract all database interactions. The repository layer encapsulates all the logic required to access data sources. Modeled after the Repository Pattern, repositories are responsible for loading and persisting aggregates. - -Repositories represent domain concepts that are present in the database. For example, say you have a requirement of fetching adult users (over the age of 21) from the database. The user repository would then have a function called `get_adults` which would use underlying Data Transfer Objects to make a query for `age >= 21`. - -.. code-block:: python - - @domain.repository(aggregate_cls=User) - class UserRepository: - @classmethod - def get_adults(cls, age: int = 21) -> List: - user_dao = current_domain.get_dao(User) - return user_dao.filter(age__gte=age).all() - -Saving to Database ------------------- - -You can obtain a repository associated with your aggregate with ``domain.repository_for`` method: - -.. code-block:: python - - from protean.globals import current_domain - - current_domain.repository_for(Post) - -Protean's repositories are collection-oriented. They are designed to closely mimic how a collection data type, like `list`, `dictionary` and `set`, would work. The Repository interface does not expose the underlying persistence mechanism, avoiding any notion of saving or persisting data to a store from leaking into the Application Service or Domain Model. - -There is a one-to-one relationship between an Aggregate and a Repository: Every Aggregate has a repository. Also, Aggregates alone have Repositories. - -Yoy can persist an aggregate with the help of ``add`` method. The ``add`` method places the new aggregate in a transaction. - -.. code-block:: python - - post_repo = current_domain.repository_for(Post) - post = Post(title="A catchy post title") - - post_repo.add(post) - -The `post` record will be persisted into the data store immediately, or when the :ref:`unit-of-work` is committed if the transaction is running under an active UoW. - -Persisted data can be removed by its unique identifier: - -.. code-block:: python - - post_repo = current_domain.repository_for(Post) - post = post_repo.get(1) - - post_repo.remove(post) - -.. note:: It is generally recommended that data never be permanently deleted from the system. It is better to use soft deletes or archiving functionalities to mark data as archived or defunct. The ``remove`` method should be primarily used for testing purposes. 
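One way to follow that advice, sketched here with an illustrative ``archived`` flag and ``archive`` method that are not part of Protean itself, is to model archival as a regular state change and persist it with ``add``, reserving ``remove`` for test cleanup:

.. code-block:: python

    @domain.aggregate
    class Post:
        title = String(max_length=50)
        archived = Boolean(default=False)  # soft-delete marker

        def archive(self):
            self.archived = True

    post_repo = current_domain.repository_for(Post)
    post = post_repo.get(1)

    post.archive()
    post_repo.add(post)  # stored as an update; nothing is physically deleted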
- -Retrieving Data ---------------- - -The ``get`` method retrieves the object with the specified key from the persistence store. - -.. code-block:: python - - post_repo = current_domain.repository_for(Post) - post = post_repo.get(1234) - -You can also fetch all records of an Aggregate with the ``all`` method: - -.. code-block:: python - - post_repo = current_domain.repository_for(Post) - posts = post_repo.all() - -Beware that the ``all`` method returns **all** records of an Aggregate type from the database as it stands today. It is meant to be used for testing purposes. Application queries should preferably be implemented outside the Domain as close as possible to the database for performance reasons. Aggregate and Repository patterns are meant to serve the write-side of the application. It is left to the application to organize the read-side to be as efficient as possible. - -.. // FIXME Add documentation for DAO API - -All other querying capabilities are performed through the DAO `filter` method. - -.. code-block:: python - - @domain.repository(aggregate_cls=User) - class UserRepository: - @classmethod - def fetch_residents(cls, zipcode: str) -> List: - user_dao = current_domain.get_dao(User) - - return user_dao.filter(zipcode=zipcode).all() - - -Custom Repositories -------------------- - -You would often want to add custom methods to your repository to aid database interactions. You can do so by defining and registering your own custom repository. - -A Repository can be defined and registered with the help of ``@domain.repository`` decorator: - -.. code-block:: python - - @domain.repository(aggregate_cls='app.User') - class UserRepository: - @classmethod - def get_by_email(cls, email: str) -> User: - user_dao = current_domain.get_dao(User) - return user_dao.find_by(email=email) - -A Repository is linked to its aggregate with the `aggregate_cls` meta attribute. The value of `aggregate_cls` can be the Aggregate class itself, or in the form of a weak reference - a string with the the fully-qualified aggregate class name. - -Database-specific Repositories ------------------------------- - -A repository can be locked to a specific database implementation. This feature comes handy if you ever use different databases with the same aggregate, for example, in testing and production environments. A repository locked to a specific database is picked up only when the aggregate's provider database matches the value specified. - -.. code-block:: python - - @domain.aggregate - class User: - first_name = String() - last_name = String() - - class Meta: - provider = 'sqlite' - - @domain.repository(aggregate_cls='app.User') - class UserRepository: - class Meta: - database = Database.SQLITE.value - -This feature also allows multiple repositories to be defined and linked per database to the aggregate. The full list of supported databases can be found :ref:`here`. Refer to :doc:`config` documentation to understand how providers are defined. - -Data Access Objects -------------------- - -Protean repositories internally use Data Access Objects (DAO) to access the persistency layer. See :ref:`adapters-dao` for more information on using Data Access Objects. - -.. code-block:: python - - user_dao = current_domain.get_dao(User) - users = user_dao.filter(state='CA') - -Data Access Objects (DAOs) can be accessed throughout the application, but it is recommended that you access them only within the repositories, in line with the pattern of placing all data access operations in the repository layer. 
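A small sketch of that recommendation, reusing the ``User`` aggregate and DAO calls from the earlier examples (the query itself is illustrative):

.. code-block:: python

    # Discouraged: reaching for the DAO directly from an application service
    users_in_ca = current_domain.get_dao(User).filter(state="CA").all()

    # Preferred: express the query as a repository method, so callers see a
    # domain-flavoured interface instead of persistence details
    @domain.repository(aggregate_cls=User)
    class UserRepository:
        @classmethod
        def residents_of(cls, state: str) -> List:
            user_dao = current_domain.get_dao(User)
            return user_dao.filter(state=state).all()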
- -At first glance, repositories and Data Access Objects may seem similar. But a repository leans towards the domain in its functionality. It contains methods and implementations that clearly identify what the domain is trying to ask/do with the persistence store. Data Access Objects, on the other hand, talk the language of the database. A repository works in conjunction with the DAO layer to access and manipulate on the persistence store. - -This separation is necessary because we want the domain layer to be agnostic to the underlying persistence store implementation. DAO are concrete implementations, one per persistence store, and are built as adapters to the Repository Port in Protean. You can switch between them without having to touch your domain functionality just by replacing plugins in your application configuration. Refer to :ref:`adapters-dao` for more information. - -Working with Application Services ---------------------------------- - -A repository's methods are typically used by :ref:`application-service` to perform lifecycle operations. - -.. code-block:: python - - @domain.application_service(aggregate_cls=User) - class SignupService: - """ Application Service that contains methods to help users register and sign up""" - @classmethod - def register(cls, request_object: UserRegistration): - # Fetch the repository configured for `User` Aggregate - repo = domain.repository_for(User) - - # Invoke the domain function to register a new User - user = User.register(request_object) - - # Persist the new user - repo.add(user) - -Unit of Work ------------- - -When there is an active Unit of Work in progress, changes performed by repositories are preserved as part of a session, and committed as an ACID transaction at the end. The entire transaction can be committed on success, or rolled back on error. - -.. code-block:: python - - from protean.core.unit_of_work import UnitOfWork - - @domain.application_service(aggregate_cls=User) - class SignupService: - - @classmethod - def register(cls, request_object: UserRegistration): - # Initialize a Unit of Work for controlling transactions - with UnitOfWork(): - repo = domain.repository_for(User) # The repository is now within a UoW - user = User.register(request_object) - repo.add(user) # User is not added to the persistence store yet - - # The Unit of Work transaction would have been committed by this point - -Note that Protean still depends on the capabilities of the underlying database to support transactional functionality. While changes are flushed as a single unit, it is left to the database implementation to construct and manage sessions and commit transactions atomically. diff --git a/docs-sphinx/user/quickstart.rst b/docs-sphinx/user/quickstart.rst deleted file mode 100644 index 6bc87b16..00000000 --- a/docs-sphinx/user/quickstart.rst +++ /dev/null @@ -1,212 +0,0 @@ -Quickstart -========== - -Eager to get started? This page gives a good introduction to Protean. Follow :doc:`installation` to set up a project and install Protean first. - -In this quickstart, we will create a simple Protean application with SQLITE as the database and Flask as the API framework. - -Initialize a Project --------------------- - -Let us initialize a new directory for your project. We will call it ``authentication``. - -From the command line, cd into a directory where you’d like to store your code, then run the following command: - -.. 
code-block:: shell - - $ protean new authentication - -This will create a ``authentication`` directory in your current directory. - -Here is a quicklook at the directory structure: - - -A Simple Domain ---------------- - -A simple Protean domain looks something like this: - -.. code-block:: python - - from protean import Domain - - domain = Domain(__name__) - -Here's what we did: - -1. First we imported the :class:`protean.Domain` class. An instance of this class will be our domain root to which all elements are attached. -2. Next, we create an instance of this class. The optional argument is the name of the domain's module or package. ``__name__`` is a convenient shortcut for this that is appropriate for most cases, so it is the default if no name is specified explicitly. - -Define an Aggregate -------------------- - -Aggregates are the basic building blocks of the domain. Use the :meth:`protean.Domain.aggregate` decorator to bind an Aggregate to the domain. - -.. code-block:: python - - from protean.field import String - - @domain.aggregate - class User: - name = String(max_length=50) - email = String(max_length=255, unique=True) - -Define an Application Service ------------------------------ - -Application services expose the domain to the external world. You can create an Application Service with the help of :meth:`protean.Domain.application_service` decorator. - -.. code-block:: python - - @domain.application_service - class SignupService: - @classmethod - def signup(cls, name, email): - user = User(name=name, email=email) - domain.repository_for(User).add(user) - - return user - -Configure a database --------------------- - -By default, a Protean domain is configured with an :class:`protean.adapters.repository.MemoryProvider` that manages a dictionary database in memory. This database is handy when you get started with your domain, especially for testing purposes. You can also specify an alternate implementation by overriding the database config. Let's do that and specify an SQLITE database. - -Note that Protean uses SQLAlchemy to access the SQLITE database internally. - -.. code-block:: python - - domain.config["DATABASES"]["default"] = { - "PROVIDER": "protean.adapters.repository.sqlalchemy.SAProvider", - "DATABASE": "SQLITE", - "DATABASE_URI": "sqlite:///quickstart.db", - } - -A database file ``quickstart.db`` will be created in the location you will be running your application from. - -Initialize the domain ---------------------- - -Before you can use the domain, you need to initialize it. Initialize the domain by calling :meth:`protean.Domain.init` on the domain instance. - -.. code-block:: python - - domain.init(traverse=False) - -Since all our code is in the same module, we can use ``traverse=False``. If you have your code spread across multiple modules, you can set ``traverse=True`` to traverse the entire module tree and load all the elements. - -Configure Flask ---------------- - -Let's next expose the domain to the external world via APIs with |flask|. We accomplish this by activating the domain in a function that runs before every request. - -We also register a function to run before Flask processes the very first request, in which we set up the database with a table whose structure is auto-generated from the Aggregate definition. - -.. 
code-block:: python - - from flask import Flask - - app = Flask(__name__) - - @app.before_request - def set_context(): - context = domain.domain_context() - context.push() - -If you want to create the database tables automatically from the structure defined in the domain, you can: - -.. code-block:: python - - @app.before_first_request - def setup_db(): - with domain.domain_context(): - for provider in domain.providers_list(): - for _, aggregate in domain.registry.aggregates.items(): - domain.repository_for(aggregate.cls)._dao - - provider._metadata.create_all() - -.. |flask| raw:: html - - Flask - -Define a route --------------- - -We are now ready to define API routes for the domain. Let's create a route that helps us create new users as well as returns a list of all existing users. - -.. code-block:: python - - @app.route("/users", methods=["GET", "POST"]) - def users(): - if request.method == "POST": - user = SignupService.signup(request.form['name'], request.form['email']) - return json.dumps(user.to_dict()), 201 - else: - users = current_domain.repository_for(User).all() - return json.dumps([user.to_dict() for user in users]), 200 - -Start the Flask server ----------------------- - -To run the Flask application, use the ``flask`` command or ``python -m flask``. The snippet below assumes that your code is saved in a file named ``quickstart.py``. If it is not, adjust the command accordingly. - -.. code-block:: shell - - $ export FLASK_APP=quickstart - $ flask run - -If all is well, you should see a success message at the console along with the URL to access the Flask server. - -Access the domain over APIs ---------------------------- - -You can access the APIs once the server is running. We can use |httpie| to fire requests from the console. Let's first fire a ``POST`` request to create a user. - -.. code-block:: shell - - http -f POST http://localhost:5000/users name=John email=john.doe@example.com - -You should see a success message with the user record that was just created. - -.. code-block:: shell - - HTTP/1.0 201 CREATED - Content-Length: 95 - Content-Type: text/html; charset=utf-8 - Date: Mon, 09 Aug 2021 16:19:31 GMT - Server: Werkzeug/1.0.1 Python/3.9.4 - - { - "email": "john.doe@example.com", - "id": "41de0f44-9dd0-4ac9-98e3-5e2eca498511", - "name": "John" - } - -We can now fire a ``GET`` request to retrieve all users from the database. - -.. code-block:: shell - - http http://127.0.0.1:5000/users - - HTTP/1.0 200 OK - Content-Length: 97 - Content-Type: text/html; charset=utf-8 - Date: Mon, 09 Aug 2021 16:19:36 GMT - Server: Werkzeug/1.0.1 Python/3.9.4 - - [ - { - "email": "john.doe@example.com", - "id": "41de0f44-9dd0-4ac9-98e3-5e2eca498511", - "name": "John" - } - ] - -.. |httpie| raw:: html - - HTTPie - --------------------- - -That's it! You have now created a simple Protean domain with SQLITE and Flask and accessed it over the web. diff --git a/docs-sphinx/user/services.rst b/docs-sphinx/user/services.rst deleted file mode 100644 index 42f3f67b..00000000 --- a/docs-sphinx/user/services.rst +++ /dev/null @@ -1,34 +0,0 @@ -Exposing the domain -=================== - -A domain rarely stands alone by itself. You eventually have to expose the domain to other services or at least a user interface, for it to be of any use. Your application has to interact with the database, load pre-existing data, coordinate transactions, and dispatch events, all while relying on the domain model to protect invariants and maintain data sanctity. 
Application Services are the primary vehicles that help you connect your domain to the external world. - -Application Services are the direct clients of the domain model. They are responsible for task coordination of use case flows, ideally one service method per flow. All aspects related to infrastructure, like security (user authentication, permissions, and authorization), persistence (data fetch and data save, transactions), and messaging (publishing and receiving messages), occur in the Application layer. - -Routing requests to Application Services ----------------------------------------- - -You can define an Application Service with the `@domain.application_service` decorator: - -.. code-block:: python - - @domain.application_service - class UserRegistration: - - def register(self, first_name, last_name, password, email): - ... - -Typically, an application service fetches the data from the database, loads the relevant parts of the domain model, and coordinates an action. - -.. code-block:: python - - @domain.application_service - class OrderServices: - @classmethod - def change_delivery_address(cls, order_id, address1, address2, address3, city, country, zipcode): - order_repo = current_domain.repository_for(Order) - order = order_repo.get(order_id) - - order.change_address(address1, address2, address3, city, country, zipcode) - - order_repo.add(order) diff --git a/docs/core-concepts/building-blocks/aggregates.md b/docs/core-concepts/building-blocks/aggregates.md index fbf6ffbe..13239b67 100644 --- a/docs/core-concepts/building-blocks/aggregates.md +++ b/docs/core-concepts/building-blocks/aggregates.md @@ -3,10 +3,10 @@ An aggregate is a cluster of domain objects that can be treated as a single unit for data changes. -Each aggregate has a root entity, known as the aggregate root, which is +Each aggregate has a root entity, known as the aggregate root, responsible for enforcing business rules and ensuring the consistency of changes within the aggregate. In Protean, **aggregate** and **aggregate root** -are synonymous. +are treated as synonymous. Aggregates help to maintain the integrity of the data by defining boundaries within which invariants must be maintained. @@ -29,7 +29,7 @@ is aborted. ### Aggregates enclose business invariants. { data-toc-label="Invariants" } -Aggregates contain invariants that should be satisfied at all times - they +Aggregates contain invariants that should always be satisfied - they are checked before and after every change to the aggregate. Invariants can be specified at the level of an aggregate's fields, the entire aggregate cluster, individual entities, or domain services that operate on multiple aggregates. @@ -40,7 +40,7 @@ Aggregates compose a graph of enclosed elements. The objects themselves can nest other objects and so on infinitely, though it is recommended to not go beyond 2 levels. -### Aggregates can hold two types of objects - Entites and Value Objects. { data-toc-label="Types of Objects" } +### Aggregates can hold two types of objects - Entities and Value Objects. { data-toc-label="Types of Objects" } Entities are objects with an identity. Value objects don't have identity; their data defines their identity. @@ -57,20 +57,20 @@ They internally load and manage the objects within their cluster. Aggregates are persisted and retrieved with the help of repositories. Repositories are collection-oriented - they mimic how a collection data type, -like list, dictionary and set, would work.
Repositories can be augmented with +like list, dictionary, and set, would work. Repositories can be augmented with custom methods to perform business queries. ### Aggregates are transaction boundaries. { data-toc-label="Transactions" } All changes to aggregates are performed within a transaction. This means that all objects in the aggregates cluster are enclosed in a single transaction -during persistence. This also translates to mean that all objects within an -aggregate cluster are kep together in the same persistence store. +during persistence. This also means that all objects within an +aggregate cluster are kept together in the same persistence store. ### Aggregates can enclose up to 500 entities. { data-toc-label="Limits" } The object graph under an aggregate is loaded eagerly. The number of associations -under an aggregate are limited to 500. If you expect the number of entities to +under an aggregate is limited to 500. If you expect the number of entities to exceed this limit, rethink your aggregate boundary. One way would be to split the aggregate into multiple aggregates. Another would be to make the underlying entity an aggregate by itself. diff --git a/docs/core-concepts/building-blocks/events.md b/docs/core-concepts/building-blocks/events.md index df8243b5..1fce445f 100644 --- a/docs/core-concepts/building-blocks/events.md +++ b/docs/core-concepts/building-blocks/events.md @@ -1,41 +1,139 @@ # Events -### Events allows different components to communicate with each other. +Domain events are immutable facts that indicate a state change in the +business domain. They capture meaningful changes and convey the state +transitions of aggregates, ensuring that all parts of the system remain +consistent and informed. -Within a domain or across, events can be used as a mechanism to implement -eventual consistency, in the same bounded context or across. This promotes -loose coupling by decoupling the producer (e.g., an aggregate that raises -an event) from the consumers (e.g., various components that handle the -event). +## Facts -Such a design eliminates the need for two-phase commits (global -transactions) across bounded contexts, optimizing performance at the level -of each transaction. +### Events are essentially Data Transfer Objects (DTO). { data-toc-label="Data Transfer Objects" } +They can only hold simple fields and Value Objects. + +### Events are named using past-tense verbs. { data-toc-label="Named in Past-Tense" } +Events should be named in past tense, because we observe domain events _after +the fact_. `StockDepleted` is a better choice than the imperative +`DepleteStock` as an event name. + +### Events contain only necessary information. { data-toc-label="Minimal" } +Events should only include the data necessary to describe the change that +occurred. This keeps them lightweight and focused. + +### Events are immutable. { data-toc-label="Immutable" } +Once created, events cannot be changed. They are a factual record of something +that has occurred in the past within the domain. -### Events act as API contracts. +### Events communicate state changes. { data-toc-label="Propagate State Change" } +Events inform other parts of the system about significant state changes in +aggregates, ensuring all interested components can respond appropriately. + +### Events act as API contracts. { data-toc-label="Act as API Contracts" } Events define a clear and consistent structure for data that is shared between different components of the system. 
This promotes system-wide interoperability and integration between components.

-### Events help preserve context boundaries.
+### Events help preserve context boundaries. { data-toc-label="Sync across Boundaries" }
 Events propagate information across bounded contexts, thus helping to sync
 changes throughout the application domain. This allows each domain to be
 modeled in the architecture pattern that is most appropriate for its use case.

-- Events should be named in past tense, because we observe domain events _after
-the fact_. `StockDepleted` is a better choice than the imperative
-`DepleteStock` as an event name.
-- An event is associated with an aggregate or a stream, specified with
-`part_of` or `stream` parameters to the decorator, as above. We will
-dive deeper into these parameters in the Processing Events section.
-
-- Events are essentially Data Transfer Objects (DTO)- they can only hold
-simple fields and Value Objects.
-- Events should only contain information directly relevant to the event. A
-receiver that needs more information should be listening to other pertinent
-events and add read-only structures to its own state to take decisions later.
-A receiver should not query the current state from the sender because the
-sender's state could have already mutated.
\ No newline at end of file
+### Events enable decoupled communication. { data-toc-label="Decouple services" }
+Systems and components communicate through events, reducing direct dependencies
+and fostering a more modular architecture.
+
+Events can thus be used as a mechanism to implement eventual consistency,
+within and across bounded contexts. This promotes loose coupling by decoupling
+the producer (e.g., an aggregate that raises an event) from the consumers
+(e.g., various components that handle the event).
+
+Such a design eliminates the need for two-phase commits (global
+transactions) across bounded contexts, optimizing performance at the level
+of each transaction.
+
+## Structure
+
+### Events have **metadata**. { data-toc-label="Metadata" }
+Metadata such as timestamps, unique event identifiers, and version numbers are
+included to ensure precise tracking and processing.
+
+### Events are **versioned**. { data-toc-label="Versioning" }
+Each event is assigned a version number, ensuring that consumers can handle
+them in the correct order and manage compatibility between event producers and
+consumers.
+
+### Events are **timestamped**. { data-toc-label="Timestamp" }
+Each event carries a timestamp indicating when the event occurred, which is
+crucial for tracking and ordering events chronologically.
+
+### Events are uniquely identifiable. { data-toc-label="Identifiers" }
+Each event carries a structured unique identifier that indicates the origin of
+the event and the unique identity of the aggregate that generated the event.
+
+### Event Streams
+Events are written to and read from streams. Review the section on
+[Streams](../streams.md) for a deep dive.
+
+## Event Types
+
+Events are categorized into two types based on their purpose and the kind of
+information they carry.
+
+### Delta Events
+
+Delta events capture incremental changes that have occurred in the state of
+an aggregate. They provide detailed information about the specific
+modifications made, allowing systems to apply only the necessary updates.
+
+Delta events record precise changes, such as attribute updates or
+modifications to collections within an aggregate. By focusing on incremental
+changes, delta events enable efficient updates and reduce the overhead
+associated with processing entire aggregates.
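To make the distinction concrete, a delta event in Protean might be sketched as below. The `domain` object, the `Inventory` aggregate, and every field name here are assumptions made for this illustration only - they are not part of the change above - but the shape follows the `part_of` association the document describes.

```python
from protean import Domain
from protean.fields import Identifier, Integer, String

domain = Domain(__file__)  # assumed setup, mirroring the quickstart guides


@domain.aggregate
class Inventory:  # hypothetical aggregate, used only for this sketch
    sku = String(max_length=50)
    quantity = Integer(default=0)


# A delta event carries only the slice of state that changed,
# never the whole aggregate.
@domain.event(part_of=Inventory)
class StockAdjusted:
    inventory_id = Identifier(required=True)  # which aggregate instance changed
    adjusted_by = Integer(required=True)      # the increment/decrement applied
    new_quantity = Integer(required=True)     # resulting stock level
```

A consumer interested only in stock levels can react to `StockAdjusted` without ever loading the full `Inventory` aggregate.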
+
+Delta events are a good choice when composing internal state via Event Sourcing,
+or when notifying external consumers of specific changes of interest, such as a
+`LowInventoryAlert`. They are also appropriate for composing a custom view of
+the state based on events (for example, in Command Query Responsibility
+Segregation (CQRS)).
+
+### Fact Events
+
+A fact event encloses the entire state of the aggregate at that specific point
+in time. It contains all of the attributes and values necessary to completely
+describe the fact in the context of your business.
+
+A fact event is similar to a row in a database: a complete set of data
+pertaining to the row at that point in time.
+
+Fact events enable a pattern known as **Event-carried State Transfer**. With
+these events, consumers do not have to build up the state themselves from
+multiple delta event types, which can be risky and error-prone, especially as
+data schemas evolve and change over time. Instead, they rely on the owning
+service to compute and produce a fully detailed fact event.
+
+## Persistence
+
+### Events are stored in an Event Store. { data-toc-label="Event Store" }
+Events are often persisted in an Event Store, which acts as an append-only
+log of all events, ensuring a reliable history of changes.
+
+### Events support Event Sourcing. { data-toc-label="Event Sourcing" }
+In Event Sourcing, the state of a domain entity is reconstructed by replaying
+the sequence of events from the Event Store, ensuring a complete and accurate
+history.
+
+### Events are part of the transaction boundary. { data-toc-label="Transactions" }
+Events are typically included in the transaction boundary, ensuring that they
+are only published if the transaction is successful.
+
+### Events trigger side effects. { data-toc-label="Side Effects" }
+Events often lead to side effects, such as updating read models, triggering
+workflows, or invoking external systems. These side effects are managed by
+event handlers.
+
+### Events can be used to build local state in a different bounded context. { data-toc-label="Read-only Models" }
+Other bounded contexts should listen to the events they are interested in and
+construct read-only structures within themselves to make decisions later. A
+receiver should not query the current state from the sender because the
+sender's state could have already mutated.
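As a counterpart to the delta event sketch above, the snippet below illustrates what a fact event and its publication could look like. `Order`, `OrderConfirmed`, and the `raise_` call are illustrative assumptions layered on the concepts in this document; they are not code introduced by this change.

```python
from protean import Domain
from protean.fields import Float, Identifier, String

domain = Domain(__file__)  # assumed setup, mirroring the quickstart guides


@domain.aggregate
class Order:  # hypothetical aggregate for illustration
    customer_id = Identifier(required=True)
    status = String(default="PENDING")
    total = Float(default=0.0)

    def confirm(self):
        self.status = "CONFIRMED"
        # A fact event snapshots the aggregate's entire state at this point in
        # time, enabling Event-carried State Transfer for downstream consumers.
        self.raise_(
            OrderConfirmed(
                order_id=self.id,
                customer_id=self.customer_id,
                status=self.status,
                total=self.total,
            )
        )


@domain.event(part_of=Order)
class OrderConfirmed:
    order_id = Identifier(required=True)
    customer_id = Identifier(required=True)
    status = String(required=True)
    total = Float(required=True)
```

Because the event carries the complete state, a read model in another bounded context can be refreshed from `OrderConfirmed` alone, without querying the `Order` aggregate back - which is exactly the guidance in the "Read-only Models" point above.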
diff --git a/docs/core-concepts/streams.md b/docs/core-concepts/streams.md new file mode 100644 index 00000000..db4757f0 --- /dev/null +++ b/docs/core-concepts/streams.md @@ -0,0 +1 @@ +# Streams \ No newline at end of file diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 00000000..2d6f9081 --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,52 @@ +[data-md-color-scheme="default"] { + --md-primary-fg-color: #1D3557; + + --md-accent-fg-color: #E76F51; + + --md-typeset-a-color: var(--md-accent-fg-color); + + color-scheme: light; +} + +p { + line-height: 1.75em; +} + +a, a:hover, a:visited, a:active { + text-decoration: none; +} + +p a, article ul li a { + font-weight: 600; +} + +.md-typeset h1, .md-typeset h2 { + color: var(--md-primary-fg-color); + font-weight: 800; + letter-spacing: -.025em; +} + +.md-typeset h1 { + font-size: 1.875em; +} + +.md-typeset h2 { + font-size: 1.5em; +} + +.md-typeset h3 { + font-weight: 600; + font-size: 1.25em; +} + +.md-nav { + line-height: 1.5em; +} + +.md-nav__link { + color: var(--md-primary-fg-color) !important; +} + +.md-nav__title { + color: var(--md-primary-fg-color) !important; +} \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 2dc6d999..296a58f2 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -2,6 +2,7 @@ site_name: Protean site_url: https://docs.proteanhq.com repo_name: proteanhq/protean repo_url: https://github.com/proteanhq/protean + theme: name: material font: @@ -18,6 +19,7 @@ theme: toggle: icon: material/lightbulb-outline name: Switch to light mode + features: - navigation.instant - navigation.instant.prefetch @@ -40,6 +42,10 @@ theme: - content.code.annotate - content.code.copy - content.code.select + +extra_css: + - stylesheets/extra.css + plugins: - privacy markdown_extensions: @@ -64,6 +70,7 @@ nav: - Core Concepts: # - core-concepts/index.md - core-concepts/analysis-model.md + - core-concepts/streams.md # - core-concepts/identity.md # - Domain-Driven Design: # - core-concepts/domain-driven-design/index.md diff --git a/src/protean/container.py b/src/protean/container.py index 6125f1ff..9dd85355 100644 --- a/src/protean/container.py +++ b/src/protean/container.py @@ -226,13 +226,17 @@ def __init__(self, *template, **kwargs): # noqa: C901 # Now load against the keyword arguments for field_name, val in kwargs.items(): + # Record that a field was encountered by appending to `loaded_fields` + # When it fails validations, we want it's errors to be recorded + # + # Not remembering the field was recorded will result in it being set to `None` + # which will raise a ValidationError of its own for the wrong reasons (required field not set) + loaded_fields.append(field_name) try: setattr(self, field_name, val) except ValidationError as err: for field_name in err.messages: self.errors[field_name].extend(err.messages[field_name]) - finally: - loaded_fields.append(field_name) # Load Value Objects from associated fields # This block will dynamically construct value objects from field values diff --git a/src/protean/core/value_object.py b/src/protean/core/value_object.py index 6681ea7b..3986bc53 100644 --- a/src/protean/core/value_object.py +++ b/src/protean/core/value_object.py @@ -6,7 +6,7 @@ from protean.container import BaseContainer, OptionsMixin, fields from protean.exceptions import IncorrectUsageError, NotSupportedError, ValidationError -from protean.fields import Reference +from protean.fields import Reference, ValueObject from protean.fields.association import Association 
 from protean.utils import DomainObjects, derive_element_class
@@ -126,6 +126,29 @@ def __init__(self, *template, **kwargs):  # noqa: C901
                 for field_name in err.messages:
                     self.errors[field_name].extend(err.messages[field_name])

+        # Load Value Objects from associated fields
+        # This block will dynamically construct value objects from field values
+        # and associate them with the enclosing value object
+        # If the value object was already provided, it will not be overridden.
+        for field_name, field_obj in fields(self).items():
+            if isinstance(field_obj, (ValueObject)) and not getattr(self, field_name):
+                attrs = [
+                    (embedded_field.field_name, embedded_field.attribute_name)
+                    for embedded_field in field_obj.embedded_fields.values()
+                ]
+                values = {name: kwargs.get(attr) for name, attr in attrs}
+                try:
+                    value_object = field_obj.value_object_cls(**values)
+                    # Set VO value only if the value object is not None/Empty
+                    if value_object:
+                        setattr(self, field_name, value_object)
+                        loaded_fields.append(field_name)
+                except ValidationError as err:
+                    for sub_field_name in err.messages:
+                        self.errors["{}_{}".format(field_name, sub_field_name)].extend(
+                            err.messages[sub_field_name]
+                        )
+
         # Now load the remaining fields with a None value, which will fail
         # for required fields
         for field_name in fields(self):
@@ -139,15 +162,15 @@ def __init__(self, *template, **kwargs):  # noqa: C901
             for field in custom_errors:
                 self.errors[field].extend(custom_errors[field])

+        # If we made it this far, the Value Object is initialized
+        # and should be marked as such
+        self._initialized = True
+
         # Raise any errors found during load
         if self.errors:
             logger.error(self.errors)
             raise ValidationError(self.errors)

-        # If we made it this far, the Value Object is initialized
-        # and should be marked as such
-        self._initialized = True
-
     def __setattr__(self, name, value):
         if not hasattr(self, "_initialized") or not self._initialized:
             return super().__setattr__(name, value)
diff --git a/tests/value_object/test_vo_in_vo.py b/tests/value_object/test_vo_in_vo.py
index 5b8940be..c33cefa3 100644
--- a/tests/value_object/test_vo_in_vo.py
+++ b/tests/value_object/test_vo_in_vo.py
@@ -32,7 +32,7 @@ def test_contact_has_address_vo():

 def test_outer_vo_initialization():
     contact = Contact(
-        email="",
+        email="john.doe@example.com",
         phone_number="123-456-7890",
         address=Address(
             street="123 Main Street", city="Anytown", state="CA", zip_code="12345"
         ),
     )

     assert contact is not None
+    assert contact.email == "john.doe@example.com"
+    assert contact.address == Address(
+        street="123 Main Street", city="Anytown", state="CA", zip_code="12345"
+    )
+    assert contact.address_street == "123 Main Street"
+
+
+def test_vo_initialization_with_attributes():
+    contact = Contact(
+        email="john.doe@example.com",
+        phone_number="123-456-7890",
+        address_street="123 Main Street",
+        address_city="Anytown",
+        address_state="CA",
+        address_zip_code="12345",
+    )
+
+    assert contact is not None
+    assert contact.email == "john.doe@example.com"
+    assert contact.address == Address(
+        street="123 Main Street", city="Anytown", state="CA", zip_code="12345"
+    )
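The tests above rely on `Contact` and `Address` definitions that live elsewhere in the test suite and are not part of this diff. For readers following along, a minimal sketch of how such nested value objects are typically declared in Protean is shown below; the field declarations are assumptions that mirror the test data, not the suite's actual definitions.

```python
from protean import Domain
from protean.fields import String, ValueObject

domain = Domain(__file__)  # assumed setup


@domain.value_object
class Address:
    street = String(max_length=100)
    city = String(max_length=50)
    state = String(max_length=2)
    zip_code = String(max_length=10)


@domain.value_object
class Contact:
    email = String(max_length=254, required=True)
    phone_number = String(max_length=20)
    # Embedding a VO exposes flattened attributes such as `address_street`;
    # the __init__ change above reassembles those attributes into an Address.
    address = ValueObject(Address)
```

With definitions along these lines, both construction styles exercised by the tests - passing an `Address` instance or passing the flattened `address_*` attributes - yield an equivalent `Contact`.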