diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..4551056
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,100 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts
+script_location = migrations
+
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory.
+prepend_sys_path = .
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the python-dateutil library that can be
+# installed by adding `alembic[tz]` to the pip requirements
+# string value is passed to dateutil.tz.gettz()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the
+# "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to migrations/versions.  When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "version_path_separator"
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
+
+# version path separator; As mentioned above, this is the character used to split
+# version_locations. Valid values are:
+#
+# version_path_separator = :
+# version_path_separator = ;
+# version_path_separator = space
+version_path_separator = os  # default: use os.pathsep
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+sqlalchemy.url = %(DATABASE_URL)s
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts.  See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..c4affea
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,81 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +from src.modules.catalog.infrastructure.persistence import ListingMetadata + +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = [ListingMetadata] + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. +section = config.config_ini_section +config.set_section_option( + section, "DATABASE_URL", "postgresql://postgres:password@localhost:5432/postgres" +) + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/poetry.lock b/poetry.lock index 225e4e8..b2c34bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,18 @@ +[[package]] +name = "alembic" +version = "1.7.3" +description = "A database migration tool for SQLAlchemy." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "appdirs" version = "1.4.4" @@ -163,6 +178,29 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "mako" +version = "1.1.5" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["babel"] +lingua = ["lingua"] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "mypy-extensions" version = "0.4.3" @@ -352,6 +390,18 @@ postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql (<1)", "pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-json" +version = "0.4.0" +description = "JSON type with nested change tracking for SQLAlchemy" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" +sqlalchemy = ">=0.7" + [[package]] name = "starlette" version = "0.14.2" @@ -390,6 +440,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "typed-ast" +version = "1.4.3" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "typing-extensions" version = "3.10.0.0" @@ -417,9 +475,13 @@ standard = ["websockets (>=9.1)", "httptools (>=0.2.0,<0.3.0)", "watchgod (>=0.6 [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "d7b144a9134c54b0421ef53fae7b2f439bb7f9b4f65bf9b8fc27ac0a797a4f77" +content-hash = "7ade17978d50ff2eb7be0ef1446efc271c091b65fc4d7bb1e41bf21d33c5b4f1" [metadata.files] +alembic = [ + {file = "alembic-1.7.3-py3-none-any.whl", hash = "sha256:d0c580041f9f6487d5444df672a83da9be57398f39d6c1802bbedec6fefbeef6"}, + {file = "alembic-1.7.3.tar.gz", hash = "sha256:bc5bdf03d1b9814ee4d72adc0b19df2123f6c50a60c1ea761733f3640feedb8d"}, +] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, @@ -633,6 +695,46 @@ iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +mako = [ + {file = "Mako-1.1.5-py2.py3-none-any.whl", hash = "sha256:6804ee66a7f6a6416910463b00d76a7b25194cd27f1918500c5bd7be2a088a23"}, + {file = "Mako-1.1.5.tar.gz", hash = "sha256:169fa52af22a91900d852e937400e79f535496191c63712e3b9fda5a9bed6fc3"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = 
"MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -791,6 +893,10 @@ sqlalchemy = [ {file = "SQLAlchemy-1.4.22-cp39-cp39-win_amd64.whl", hash = "sha256:1fdae7d980a2fa617d119d0dc13ecb5c23cc63a8b04ffcb5298f2c59d86851e9"}, {file = "SQLAlchemy-1.4.22.tar.gz", hash = "sha256:ec1be26cdccd60d180359a527d5980d959a26269a2c7b1b327a1eea0cab37ed8"}, ] +sqlalchemy-json = [ + {file = "sqlalchemy-json-0.4.0.tar.gz", hash = "sha256:d8e72cac50724a17cc137c98bec5cb5990e9f1e8fc3eb30dd225fb47c087ea27"}, + {file = "sqlalchemy_json-0.4.0-py2.py3-none-any.whl", hash = "sha256:0f52f24301aa3b5ea240b622facc489eff2e7bfddde931ba988bfabc306b1778"}, +] starlette = [ {file = "starlette-0.14.2-py3-none-any.whl", hash = "sha256:3c8e48e52736b3161e34c9f0e8153b4f32ec5d8995a3ee1d59410d92f75162ed"}, {file = "starlette-0.14.2.tar.gz", hash = "sha256:7d49f4a27f8742262ef1470608c59ddbc66baf37c148e938c7038e6bc7a998aa"}, @@ -807,6 +913,38 @@ tomlkit = [ {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"}, {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"}, ] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = 
"typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] typing-extensions = [ {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, diff --git a/pyproject.toml b/pyproject.toml index f5b4c34..6bd814c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,6 +17,9 @@ psycopg2 = "^2.9.1" dependency-injector = "^4.35.2" colorlog = "^5.0.1" python-json-logger = "^2.0.2" +alembic = "^1.7.3" +sqlalchemy-json = "^0.4.0" +typed-ast = "^1.4.3" [tool.poetry.dev-dependencies] poethepoet = "^0.10.0" diff --git a/src/alembic.ini b/src/alembic.ini new file mode 100644 index 0000000..1589d7f --- /dev/null +++ b/src/alembic.ini @@ -0,0 +1,100 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. Valid values are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # default: use os.pathsep + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = postgresql://postgres:password@localhost/postgres + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/src/alembic/README b/src/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/src/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/src/alembic/env.py b/src/alembic/env.py new file mode 100644 index 0000000..3055c86 --- /dev/null +++ b/src/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +from modules.catalog.infrastructure.listing_repository import CatalogListing +from seedwork.infrastructure.database import Base + +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = [CatalogListing.metadata] + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/src/alembic/script.py.mako b/src/alembic/script.py.mako new file mode 100644 index 0000000..2c01563 --- /dev/null +++ b/src/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/src/alembic/versions/d6c2334f4816_initial_listing_model.py b/src/alembic/versions/d6c2334f4816_initial_listing_model.py new file mode 100644 index 0000000..67cb99f --- /dev/null +++ b/src/alembic/versions/d6c2334f4816_initial_listing_model.py @@ -0,0 +1,33 @@ +"""initial listing model + +Revision ID: d6c2334f4816 +Revises: +Create Date: 2021-09-27 17:33:02.166128 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "d6c2334f4816" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "catalog_listing", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("data", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_table("catalog_listing") + # ### end Alembic commands ### diff --git a/src/api/__main__.py b/src/api/__main__.py new file mode 100644 index 0000000..ad572de --- /dev/null +++ b/src/api/__main__.py @@ -0,0 +1,4 @@ +import uvicorn +from api.main import app + +uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/src/api/dependencies.py b/src/api/dependencies.py index b47ea30..7b89dbe 100644 --- a/src/api/dependencies.py +++ b/src/api/dependencies.py @@ -9,20 +9,3 @@ class CurrentUser(BaseModel): username = "fake_current_user" email = "fake@email.com" is_admin = True - - -class Logger: - ... - - -@dataclass -class RequestContext: - current_user: CurrentUser - logger: Logger - - -def request_context(): - return RequestContext( - current_user=CurrentUser(id=UUID("{12345678-1234-5678-1234-567812345678}")), - logger=Logger(), - ) diff --git a/src/api/main.py b/src/api/main.py index c8b3ba1..2e998ca 100644 --- a/src/api/main.py +++ b/src/api/main.py @@ -1,24 +1,13 @@ import logging -from os import getenv -from fastapi import FastAPI, Depends +import time +from fastapi import FastAPI, Request from seedwork.infrastructure.request_context import request_context -from starlette.middleware import Middleware -from starlette_context import context, plugins -from starlette_context.middleware import ContextMiddleware - - -from seedwork.domain.value_objects import UUID -from seedwork.infrastructure.repository import InMemoryRepository from seedwork.infrastructure.logging import logger, LoggerFactory -from modules.catalog.domain.entities import Listing -from modules.catalog.application.commands import CreateListingDraftCommand -from modules.catalog.application.command_handlers import create_listing_draft from api.routers import catalog, users -from api.middleware import RequestContextPlugin from config.api_config import ApiConfig from config.container import Container - +import api.routers.catalog # configure logger prior to first usage LoggerFactory.configure(logger_name="cli") @@ -26,16 +15,9 @@ # dependency injection container container = Container() container.config.from_pydantic(ApiConfig()) +container.wire(modules=[api.routers.catalog]) -# API middleware -middleware = [ - Middleware( - ContextMiddleware, - plugins=(RequestContextPlugin(container),), - ) -] - -app = FastAPI(debug=container.config.DEBUG, middleware=middleware) +app = FastAPI(debug=container.config.DEBUG) app.include_router(catalog.router) app.include_router(users.router) @@ -45,6 +27,20 @@ logger.info("using db engine %s" % str(container.engine())) +@app.middleware("http") +async def add_request_context(request: Request, call_next): + start_time = time.time() + request_context.begin_request() + logger.info("middleware") + try: + response = await call_next(request) + process_time = time.time() - start_time + response.headers["X-Process-Time"] = str(process_time) + return response + finally: + request_context.end_request() + + @app.get("/") async def root(): return {"info": "Online auctions API. 
See /docs for documentation"} diff --git a/src/api/middleware.py b/src/api/middleware.py deleted file mode 100644 index ee645a7..0000000 --- a/src/api/middleware.py +++ /dev/null @@ -1,18 +0,0 @@ -from os import umask - - -import uuid -from starlette_context.plugins import Plugin - - -class RequestContextPlugin(Plugin): - key = "request_context" - - def __init__(self, container) -> None: - super().__init__() - self.container = container - - async def process_request(self, request): - """When processing a request, we set up a new request context""" - context = self.container.request_context() - context._correlation_id.set(uuid.uuid4()) diff --git a/src/api/modules.py b/src/api/modules.py deleted file mode 100644 index 0c79fe7..0000000 --- a/src/api/modules.py +++ /dev/null @@ -1,4 +0,0 @@ -from modules.catalog.module import CatalogModule - - -catalog_module = CatalogModule() diff --git a/src/api/routers/catalog.py b/src/api/routers/catalog.py index 95f3ddc..0a36222 100644 --- a/src/api/routers/catalog.py +++ b/src/api/routers/catalog.py @@ -1,36 +1,72 @@ from fastapi import APIRouter, Depends -from seedwork.domain.value_objects import UUID +from seedwork.infrastructure.request_context import request_context from modules.catalog.module import CatalogModule -from modules.catalog.application.commands import CreateListingDraftCommand -from modules.catalog.application.queries import GetAllListingsQuery +from modules.catalog.application.command.create_listing_draft import ( + CreateListingDraftCommand, +) +from modules.catalog.application.query.get_all_listings import GetAllListings +from modules.catalog.application.query.get_listing_details import GetListingDetails -from ..dependencies import request_context, RequestContext -from ..modules import catalog_module as module +from config.container import Container, inject router = APIRouter() +def dependency(provider): + from dependency_injector.wiring import Provide + + return Depends(Provide[provider]) + + @router.get("/catalog", tags=["catalog"]) -async def get_all_listings(): - result = module.execute_query(GetAllListingsQuery()) +@inject +async def get_all_listings( + module: CatalogModule = dependency(Container.catalog_module), +): + query_result = module.execute_query(GetAllListings()) """ Shows all published listings in the catalog """ - return {"data": result} + return {"data": query_result.result} + + +@router.get("/catalog/add_next", tags=["catalog"]) +@inject +async def get_listing_details( + module: CatalogModule = dependency(Container.catalog_module), +): + """just for testing....""" + from modules.catalog.infrastructure.listing_repository import CatalogListing + + count = module.listing_repository.count() + listing = CatalogListing( + id=module.listing_repository.next_id(), data=dict(name=f"foo-{count}") + ) + module.listing_repository.session.add(listing) + return {"data": count} @router.get("/catalog/{listing_id}", tags=["catalog"]) -async def get_listing_details(listing_id: UUID): +@inject +async def get_listing_details( + listing_id, module: CatalogModule = dependency(Container.catalog_module) +): + """ + Shows listing details + """ + query_result = module.execute_query(GetListingDetails(listing_id=listing_id)) """ Shows all published listings in the catalog """ - return {"message": "catalog here"} + return {"data": query_result.result} + + return {"data": "catalog here"} @router.post("/catalog", tags=["catalog"], status_code=201) -async def create_listing(request_context: RequestContext = Depends(request_context)): +async def 
create_listing(module: CatalogModule = dependency(Container.catalog_module)): """ Creates a new listing. """ diff --git a/src/api/routers/users.py b/src/api/routers/users.py index e2ecb3b..80f34be 100644 --- a/src/api/routers/users.py +++ b/src/api/routers/users.py @@ -1,19 +1,19 @@ from fastapi import APIRouter, Depends -from api.dependencies import request_context, RequestContext +from seedwork.infrastructure.request_context import request_context, RequestContext router = APIRouter() - -@router.get("/users", tags=["users"]) -async def user_list(): - return [{"username": "Rick"}, {"username": "Morty"}] - - -@router.get("/users/me", tags=["users"]) -async def current_user_details(context: RequestContext = Depends(request_context)): - return context.current_user - - -@router.get("/users/{username}", tags=["users"]) -async def user_details(username: str): - return {"username": username} +# +# @router.get("/users", tags=["users"]) +# async def user_list(): +# return [{"username": "Rick"}, {"username": "Morty"}] +# +# +# @router.get("/users/me", tags=["users"]) +# async def current_user_details(context: RequestContext = Depends(request_context)): +# return context.current_user +# +# +# @router.get("/users/{username}", tags=["users"]) +# async def user_details(username: str): +# return {"username": username} diff --git a/src/cli/README.md b/src/cli/README.md new file mode 100644 index 0000000..354b42b --- /dev/null +++ b/src/cli/README.md @@ -0,0 +1,22 @@ +This is a sample command line script to print all listings + +1. Start the database + +``` +docker-compose -f docker-compose.dev.yml +``` + +2. Apply all migrations + +``` +cd src +alembic upgrade head +``` + +3. Run the script (from src directory): + +``` +python -m cli +``` + + diff --git a/src/cli/__main__.py b/src/cli/__main__.py index 32f02e1..8566f8b 100644 --- a/src/cli/__main__.py +++ b/src/cli/__main__.py @@ -1,12 +1,12 @@ -from uuid import uuid4 -from modules.catalog import catalog_container -from modules.catalog.domain.repositories import SellerRepository -from modules.catalog.application.queries import ( - GetAllListingsQuery, - GetListingsOfSellerQuery, -) from seedwork.infrastructure.request_context import request_context from seedwork.infrastructure.logging import logger, LoggerFactory +from config.container import Container +from modules.catalog.domain.repositories import SellerRepository +from modules.catalog.application.query.get_all_listings import GetAllListings +from modules.catalog.application.query.get_listings_of_seller import GetListingsOfSeller +from modules.catalog.application.command.create_listing_draft import ( + CreateListingDraftCommand, +) # a sample command line script to print all listings @@ -16,23 +16,42 @@ LoggerFactory.configure(logger_name="cli") # configure catalog module -catalog_container.config.from_dict(dict()) +container = Container() +container.config.from_dict( + dict( + DATABASE_URL="postgresql://postgres:password@localhost/postgres", + DEBUG=True, + ) +) # instantiate catalog module -catalog_module = catalog_container.module() +catalog_module = container.catalog_module() logger.info("Application configured") +# let's generate a fake seller id for now +seller_id = SellerRepository.next_id() + # interact with a catalog module by issuing queries and commands # use request context if you want to logically separate queries/commands # from each other in the logs with request_context: - query_result = catalog_module.execute_query(GetAllListingsQuery()) - logger.info("All listings: %s", 
query_result.data) + command = CreateListingDraftCommand( + title="Foo", description="Bar", price=1, seller_id=seller_id + ) + result = catalog_module.execute_command(command) + print(result) + if result.is_ok(): + logger.info("Draft added") + else: + logger.error(result.get_errors()) + +with request_context: + query_result = catalog_module.execute_query(GetAllListings()) + logger.info(f"All listings: {query_result.result}") with request_context: - seller_id = SellerRepository.next_id() query_result = catalog_module.execute_query( - GetListingsOfSellerQuery(seller_id=seller_id) + GetListingsOfSeller(seller_id=seller_id) ) - logger.info(f"Listings of seller {seller_id} %s", query_result.data) + logger.info(f"Listings of seller {seller_id}: {query_result.result}") diff --git a/src/config/container.py b/src/config/container.py index bf2768f..195053c 100644 --- a/src/config/container.py +++ b/src/config/container.py @@ -3,9 +3,14 @@ from sqlalchemy import create_engine from dependency_injector import containers, providers -from modules.catalog.infrastructure.persistence import MongoListingRepository +from modules.catalog.infrastructure.listing_repository import ( + PostgresJsonListingRepository, +) +from modules.catalog.module import CatalogModule from seedwork.infrastructure.request_context import RequestContext +from dependency_injector.wiring import inject + class DummyService: def __init__(self, config) -> None: @@ -15,20 +20,48 @@ def serve(self): return f"serving with config {self.config}" +def create_request_context(engine): + print("create_request_context") + from seedwork.infrastructure.request_context import request_context + + request_context.setup(engine) + return request_context + + +def create_engine_once(config): + engine = create_engine(config["DATABASE_URL"], echo=config["DEBUG"]) + from seedwork.infrastructure.database import Base + + # TODO: it seems like a hack, but it works... 
+ Base.metadata.bind = engine + return engine + + class Container(containers.DeclarativeContainer): """Dependency Injection Container see https://github.com/ets-labs/python-dependency-injector for more details """ + __self__ = providers.Self() + config = providers.Configuration() - engine = providers.Singleton(create_engine, config.DATABASE_URL, echo=config.DEBUG) + engine = providers.Singleton(create_engine_once, config) dummy_service = providers.Factory(DummyService, config) dummy_singleton = providers.Singleton(DummyService, config) - request_context = providers.Singleton(RequestContext) + request_context: RequestContext = providers.Factory( + create_request_context, engine=engine + ) correlation_id = providers.Factory( lambda request_context: request_context.correlation_id.get(), request_context ) - listing_repository = providers.Factory(MongoListingRepository) + + # catalog module and it's dependencies + listing_repository = providers.Factory( + PostgresJsonListingRepository, db_session=request_context.provided.db_session + ) + catalog_module = providers.Factory( + CatalogModule, listing_repository=listing_repository + ) diff --git a/src/modules/__init__.py b/src/modules/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/modules/catalog/__init__.py b/src/modules/catalog/__init__.py index 24af660..e69de29 100644 --- a/src/modules/catalog/__init__.py +++ b/src/modules/catalog/__init__.py @@ -1,7 +0,0 @@ -from modules.catalog.container import CatalogModuleContainer - -""" -catalog_container is a composition root for a catalog module -Use `catalog_container.module()` to create an instance of CatalogModule -""" -catalog_container = CatalogModuleContainer() diff --git a/src/modules/catalog/application/command/create_listing_draft.py b/src/modules/catalog/application/command/create_listing_draft.py new file mode 100644 index 0000000..5570faf --- /dev/null +++ b/src/modules/catalog/application/command/create_listing_draft.py @@ -0,0 +1,28 @@ +from seedwork.application.commands import Command +from seedwork.domain.value_objects import Currency, UUID +from modules.catalog.domain.entities import Listing +from modules.catalog.domain.repositories import ListingRepository +from seedwork.application.command_handlers import CommandResult +from seedwork.application.decorators import command_handler + + +class CreateListingDraftCommand(Command): + """A command for creating new listing in draft state""" + + title: str + description: str + price: Currency + seller_id: UUID + + +@command_handler +def create_listing_draft( + command: CreateListingDraftCommand, repository: ListingRepository +) -> CommandResult: + listing = Listing(**command.dict()) + try: + repository.insert(listing) + except Exception as e: + return CommandResult.failed(message="Failed to create listing", exception=e) + + return CommandResult.ok(id=listing.id) diff --git a/src/modules/catalog/application/command/publish_listing.py b/src/modules/catalog/application/command/publish_listing.py new file mode 100644 index 0000000..f91fe2e --- /dev/null +++ b/src/modules/catalog/application/command/publish_listing.py @@ -0,0 +1,29 @@ +from seedwork.application.commands import Command +from seedwork.domain.value_objects import UUID +from modules.catalog.domain.repositories import ListingRepository, SellerRepository +from seedwork.application.command_handlers import CommandResult +from seedwork.application.decorators import command_handler + + +class PublishListingCommand(Command): + """A command for publishing a listing in 
draft state"""
+
+    listing_id: UUID
+    seller_id: UUID
+
+
+@command_handler
+def publish_listing(
+    command: PublishListingCommand,
+    listing_repository: ListingRepository,
+    seller_repository: SellerRepository,
+):
+    listing = listing_repository.get_by_id(command.listing_id)
+    seller = seller_repository.get_by_id(command.seller_id)
+
+    seller.publish_listing(listing)
+
+    # TODO: for now we need to manually persist the changes, but it should be handled automatically using "Unit of Work"
+    listing_repository.update(listing)
+
+    return CommandResult.ok()
diff --git a/src/modules/catalog/application/command/update_listing_draft.py b/src/modules/catalog/application/command/update_listing_draft.py
new file mode 100644
index 0000000..356f445
--- /dev/null
+++ b/src/modules/catalog/application/command/update_listing_draft.py
@@ -0,0 +1,31 @@
+from seedwork.application.commands import Command
+from seedwork.domain.value_objects import Currency, UUID
+from modules.catalog.domain.repositories import ListingRepository
+from seedwork.application.command_handlers import CommandResult
+from seedwork.application.decorators import command_handler
+
+
+class UpdateListingDraftCommand(Command):
+    """A command for updating a listing"""
+
+    listing_id: UUID
+    title: str
+    description: str
+    price: Currency
+    modify_user_id: UUID
+
+
+@command_handler
+def update_listing_draft(
+    command: UpdateListingDraftCommand, repository: ListingRepository
+) -> CommandResult:
+    listing = repository.get_by_id(command.listing_id)
+    listing.change_main_attributes(
+        title=command.title, description=command.description, price=command.price
+    )
+    try:
+        repository.update(listing)
+    except Exception as e:
+        return CommandResult.failed(message="Failed to update listing", exception=e)
+
+    return CommandResult.ok()
diff --git a/src/modules/catalog/application/command_handlers.py b/src/modules/catalog/application/command_handlers.py
deleted file mode 100644
index cbfbcff..0000000
--- a/src/modules/catalog/application/command_handlers.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from modules.catalog.application.commands import (
-    CreateListingDraftCommand,
-    UpdateListingDraftCommand,
-    PublishListingCommand,
-)
-from modules.catalog.domain.entities import Listing
-from modules.catalog.domain.repositories import ListingRepository, SellerRepository
-from seedwork.application.command_handlers import CommandResult
-from seedwork.application.decorators import command_handler
-
-
-@command_handler
-def create_listing_draft(
-    command: CreateListingDraftCommand, repository: ListingRepository
-) -> CommandResult:
-    listing = Listing(**command.dict())
-    try:
-        repository.insert(listing)
-    except:
-        return CommandResult.error("Failed to create listing")
-
-    return CommandResult.ok(id=listing.id)
-
-
-@command_handler
-def update_listing_draft(
-    command: UpdateListingDraftCommand, repository: ListingRepository
-) -> CommandResult:
-    listing = repository.get_by_id(command.listing_id)
-    listing.change_main_attributes(
-        title=command.title, description=command.description, price=command.price
-    )
-    try:
-        repository.update(listing)
-    except:
-        return CommandResult.error("Failed to update listing")
-
-    return CommandResult.ok()
-
-
-@command_handler
-def publish_listing(
-    command: PublishListingCommand,
-    listing_repository: ListingRepository,
-    seller_repository: SellerRepository,
-):
-    listing = listing_repository.get_by_id(command.listing_id)
-    seller = seller_repository.get_by_id(command.seller_id)
-
-    seller.publish_listing(listing)
-
-    # TODO: for now we need to manually persist
the changes, but it should be handled automatically using "Unit of Work" - listing_repository.update(listing) - - return CommandResult.ok() diff --git a/src/modules/catalog/application/commands.py b/src/modules/catalog/application/commands.py deleted file mode 100644 index f760588..0000000 --- a/src/modules/catalog/application/commands.py +++ /dev/null @@ -1,28 +0,0 @@ -from seedwork.application.commands import Command -from seedwork.domain.value_objects import Currency, UUID - - -class CreateListingDraftCommand(Command): - """A command for creating new listing in draft state""" - - title: str - description: str - price: Currency - seller_id: UUID - - -class UpdateListingDraftCommand(Command): - """A command for updating a listing""" - - listing_id: UUID - title: str - description: str - price: Currency - modify_user_id: UUID - - -class PublishListingCommand(Command): - """A command for publishing a listing in draft state""" - - listing_id: UUID - seller_id: UUID diff --git a/src/modules/catalog/application/queries.py b/src/modules/catalog/application/queries.py deleted file mode 100644 index f6137f3..0000000 --- a/src/modules/catalog/application/queries.py +++ /dev/null @@ -1,10 +0,0 @@ -from seedwork.domain.value_objects import UUID -from seedwork.application.queries import Query - - -class GetAllListingsQuery(Query): - ... - - -class GetListingsOfSellerQuery(Query): - seller_id: UUID diff --git a/src/modules/catalog/application/query/get_all_listings.py b/src/modules/catalog/application/query/get_all_listings.py new file mode 100644 index 0000000..28f1374 --- /dev/null +++ b/src/modules/catalog/application/query/get_all_listings.py @@ -0,0 +1,19 @@ +from seedwork.domain.value_objects import UUID +from seedwork.application.queries import Query +from seedwork.application.query_handlers import QueryResult +from seedwork.application.decorators import query_handler +from modules.catalog.domain.repositories import ListingRepository + + +class GetAllListings(Query): + ... 
+ + +@query_handler +def get_all_listings( + query: GetAllListings, listing_repository: ListingRepository +) -> QueryResult: + queryset = listing_repository.session.query(listing_repository.model) + result = [dict(id=row.id, **row.data) for row in queryset.all()] + # TODO: add error handling + return QueryResult.ok(result) diff --git a/src/modules/catalog/application/query/get_listing_details.py b/src/modules/catalog/application/query/get_listing_details.py new file mode 100644 index 0000000..c1be430 --- /dev/null +++ b/src/modules/catalog/application/query/get_listing_details.py @@ -0,0 +1,21 @@ +from seedwork.domain.value_objects import UUID +from seedwork.application.queries import Query +from seedwork.application.query_handlers import QueryResult +from seedwork.application.decorators import query_handler + +from modules.catalog.domain.repositories import ListingRepository + + +class GetListingDetails(Query): + listing_id: UUID + + +@query_handler +def get_listing_details( + query: GetListingDetails, listing_repository: ListingRepository +) -> QueryResult: + queryset = listing_repository.session.query(listing_repository.model).filter_by( + id=query.listing_id + ) + result = [dict(id=row.id, **row.data) for row in queryset.all()][0] + return QueryResult.ok(result) diff --git a/src/modules/catalog/application/query/get_listings_of_seller.py b/src/modules/catalog/application/query/get_listings_of_seller.py new file mode 100644 index 0000000..3db8afa --- /dev/null +++ b/src/modules/catalog/application/query/get_listings_of_seller.py @@ -0,0 +1,21 @@ +from seedwork.domain.value_objects import UUID +from seedwork.application.queries import Query +from seedwork.application.query_handlers import QueryResult +from seedwork.application.decorators import query_handler +from modules.catalog.domain.repositories import ListingRepository + + +class GetListingsOfSeller(Query): + seller_id: UUID + + +@query_handler +def get_listings_of_seller( + query: GetListingsOfSeller, listing_repository: ListingRepository +) -> QueryResult: + queryset = listing_repository.session.query(listing_repository.model)\ + # .filter( + # listing_repository.model.data['seller'].astext.cast(UUID) == query.seller_id + # ) + result = [dict(id=row.id, **row.data) for row in queryset.all()] + return QueryResult.ok(result) diff --git a/src/modules/catalog/application/query_handlers.py b/src/modules/catalog/application/query_handlers.py deleted file mode 100644 index e7ad074..0000000 --- a/src/modules/catalog/application/query_handlers.py +++ /dev/null @@ -1,19 +0,0 @@ -from seedwork.application.queries import Query -from seedwork.application.query_handlers import QueryHandler, QueryResult -from seedwork.application.decorators import query_handler - -from ..application.queries import GetAllListingsQuery, GetListingsOfSellerQuery -from ..domain.repositories import ListingRepository - - -@query_handler -def get_all_listings(query: GetAllListingsQuery) -> QueryResult: - sql = "SELECT * FROM listings" - data = ["foo", "bar"] - return QueryResult.ok(data) - - -@query_handler -def get_listings_of_seller(query: GetListingsOfSellerQuery) -> QueryHandler: - data = ["foo"] - return QueryResult.ok(data) diff --git a/src/modules/catalog/container.py b/src/modules/catalog/container.py deleted file mode 100644 index e0ae75b..0000000 --- a/src/modules/catalog/container.py +++ /dev/null @@ -1,20 +0,0 @@ -""" -Each module has it's own DI composition root -""" - -from dependency_injector import containers, providers -from sqlalchemy import 
create_engine -from modules.catalog.module import CatalogModule -from modules.catalog.infrastructure.persistence import MongoListingRepository - - -class CatalogModuleContainer(containers.DeclarativeContainer): - """Dependency Injection Container - - see https://github.com/ets-labs/python-dependency-injector for more details - """ - - config = providers.Configuration() - engine = providers.Singleton(create_engine, config.DATABASE_URL, echo=config.DEBUG) - listing_repository = providers.Factory(MongoListingRepository) - module = providers.Singleton(CatalogModule) diff --git a/src/modules/catalog/infrastructure/listing_repository.py b/src/modules/catalog/infrastructure/listing_repository.py index 769c7be..dd9fcb0 100644 --- a/src/modules/catalog/infrastructure/listing_repository.py +++ b/src/modules/catalog/infrastructure/listing_repository.py @@ -1,12 +1,12 @@ -from sqlalchemy import Table from sqlalchemy.sql.schema import Column -from sqlalchemy.orm.session import Session -from sqlalchemy.sql.sqltypes import String, Integer, Text, Numeric -from sqlalchemy.dialects.postgresql import UUID +from contextvars import ContextVar +from sqlalchemy.orm import Session +from sqlalchemy_json import mutable_json_type +from sqlalchemy.dialects.postgresql import UUID, JSONB +import uuid from seedwork.infrastructure.database import Base -from modules.catalog.domain.value_objects import ListingStatus from modules.catalog.domain.repositories import ListingRepository from modules.catalog.domain.entities import Listing @@ -16,41 +16,33 @@ https://youtu.be/sO7FFPNvX2s?t=7214 """ -from sqlalchemy.orm import declarative_base - -class ListingModel(Base): +class CatalogListing(Base): __tablename__ = "catalog_listing" - id = Column(UUID(as_uuid=True), primary_key=True) - id = Column(String, primary_key=True) - title = Column(String(128)) - description = Column(Text) - price = Column(Numeric) - seller_id = Column(UUID(as_uuid=True)) - status = Column(String) - + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + data = Column(mutable_json_type(dbtype=JSONB, nested=True)) -class ListingMapper: - """Maps attributes betweer Listing and ListingModel""" - def model_to_entity(self, model: ListingModel) -> Listing: - data = model.__dict__ - return Listing(**data) +class JsonMapper: + ... - def entity_to_model(self, entity: Listing) -> ListingModel: - data = entity.dict() - return ListingModel(**data) - -class SqlListingRepository(ListingRepository): +class PostgresJsonListingRepository(ListingRepository): """Listing repository implementation""" - def __init__(self, db_session: Session, mapper=ListingMapper()) -> None: - self.session = db_session + model = CatalogListing + + def __init__(self, db_session: ContextVar, mapper=JsonMapper()): + self._session_cv = db_session self.mapper = mapper + @property + def session(self) -> Session: + return self._session_cv.get() + def get_by_id(self, id: UUID) -> Listing: - model = self.session.query(ListingModel).filter_by(id=str(id)).one() + model = self.session.query(CatalogListing).filter_by(id=str(id)).one() + print(model) entity = self.mapper.model_to_entity(model) return entity @@ -58,5 +50,11 @@ def insert(self, entity: Listing): model = self.mapper.entity_to_model(entity) self.session.add(model) + def update(self, entity: Listing): + ... + + def delete(self, entity: Listing): + ... 
+ def count(self) -> int: - return self.session.query(ListingModel).count() + return self.session.query(self.model).count() diff --git a/src/modules/catalog/infrastructure/persistence.py b/src/modules/catalog/infrastructure/persistence.py deleted file mode 100644 index 15380ef..0000000 --- a/src/modules/catalog/infrastructure/persistence.py +++ /dev/null @@ -1,21 +0,0 @@ -from seedwork.domain.aggregates import Aggregate -from seedwork.domain.value_objects import UUID - -from modules.catalog.domain.repositories import ListingRepository - - -class MongoListingRepository(ListingRepository): - def get_by_id(self, id: UUID) -> Aggregate: - ... - - def get_by_id(self, id: UUID) -> Aggregate: - ... - - def insert(self, entity: Aggregate): - ... - - def update(self, entity: Aggregate): - ... - - def delete(self, entity_id: UUID): - ... diff --git a/src/modules/catalog/infrastructure/test_listing_repository.py b/src/modules/catalog/infrastructure/test_listing_repository.py index cdfc2d9..8481e60 100644 --- a/src/modules/catalog/infrastructure/test_listing_repository.py +++ b/src/modules/catalog/infrastructure/test_listing_repository.py @@ -1,19 +1,21 @@ from sqlalchemy import engine from seedwork.domain.value_objects import UUID from modules.catalog.domain.entities import Listing -from modules.catalog.infrastructure.listing_repository import SqlListingRepository +from modules.catalog.infrastructure.listing_repository import ( + PostgresJsonListingRepository, +) # engine = sqlalchemy.create_engine("") def test_listing_repo_is_empty(db_session): - repo = SqlListingRepository(db_session=db_session) + repo = PostgresJsonListingRepository(db_session=db_session) assert repo.count() == 0 def test_listing_persistence(db_session): original = Listing(title="red dragon", description="", price=1, seller_id=UUID.v4()) - repo = SqlListingRepository(db_session=db_session) + repo = PostgresJsonListingRepository(db_session=db_session) repo.insert(original) db_session.commit() diff --git a/src/modules/catalog/module.py b/src/modules/catalog/module.py index 219cb0b..5926ee3 100644 --- a/src/modules/catalog/module.py +++ b/src/modules/catalog/module.py @@ -1,56 +1,52 @@ -from dependency_injector.wiring import inject, Provide - -from seedwork.application.decorators import query_handler from seedwork.application.modules import BusinessModule -from seedwork.application.commands import Command -from seedwork.application.command_handlers import CommandResult -from seedwork.application.queries import Query -from seedwork.application.query_handlers import QueryResult -from seedwork.infrastructure.repository import InMemoryRepository -from seedwork.infrastructure.logging import logger - -from modules.catalog.domain.repositories import ListingRepository + from .domain.repositories import ListingRepository -from .application.queries import GetAllListingsQuery, GetListingsOfSellerQuery -from .application.query_handlers import get_all_listings, get_listings_of_seller +from modules.catalog.application.query.get_all_listings import ( + GetAllListings, + get_all_listings, +) + +from modules.catalog.application.query.get_listings_of_seller import ( + GetListingsOfSeller, + get_listings_of_seller, +) -from .application.commands import CreateListingDraftCommand -from .application.command_handlers import create_listing_draft +from modules.catalog.application.query.get_listing_details import ( + GetListingDetails, + get_listing_details, +) + +from modules.catalog.application.command.create_listing_draft import ( + 
diff --git a/src/modules/catalog/module.py b/src/modules/catalog/module.py
index 219cb0b..5926ee3 100644
--- a/src/modules/catalog/module.py
+++ b/src/modules/catalog/module.py
@@ -1,56 +1,52 @@
-from dependency_injector.wiring import inject, Provide
-
-from seedwork.application.decorators import query_handler
 from seedwork.application.modules import BusinessModule
-from seedwork.application.commands import Command
-from seedwork.application.command_handlers import CommandResult
-from seedwork.application.queries import Query
-from seedwork.application.query_handlers import QueryResult
-from seedwork.infrastructure.repository import InMemoryRepository
-from seedwork.infrastructure.logging import logger
-from modules.catalog.domain.repositories import ListingRepository
+
 from .domain.repositories import ListingRepository
-from .application.queries import GetAllListingsQuery, GetListingsOfSellerQuery
-from .application.query_handlers import get_all_listings, get_listings_of_seller
+from modules.catalog.application.query.get_all_listings import (
+    GetAllListings,
+    get_all_listings,
+)
+
+from modules.catalog.application.query.get_listings_of_seller import (
+    GetListingsOfSeller,
+    get_listings_of_seller,
+)
 
-from .application.commands import CreateListingDraftCommand
-from .application.command_handlers import create_listing_draft
+from modules.catalog.application.query.get_listing_details import (
+    GetListingDetails,
+    get_listing_details,
+)
+
+from modules.catalog.application.command.create_listing_draft import (
+    CreateListingDraftCommand,
+    create_listing_draft,
+)
 
 
 class CatalogModule(BusinessModule):
-    listing_repository: ListingRepository
+    query_handlers = {
+        GetAllListings: lambda self, q: get_all_listings(q, self.listing_repository),
+        GetListingDetails: lambda self, q: get_listing_details(
+            q, self.listing_repository
+        ),
+        GetListingsOfSeller: lambda self, q: get_listings_of_seller(
+            q, self.listing_repository
+        ),
+    }
+    command_handlers = {
+        CreateListingDraftCommand: lambda self, c: create_listing_draft(
+            c, self.listing_repository
+        ),
+    }
 
-    @inject
     def __init__(
         self,
-        listing_repository: ListingRepository = Provide["listing_repository"],
+        listing_repository: ListingRepository,
     ) -> None:
         self.listing_repository = listing_repository
 
-    def execute_command(self, command: Command) -> CommandResult:
-        assert isinstance(command, Command)
-        # in the future we will certailny need something smarter than that (i.e. mediator)
-        if type(command) is CreateListingDraftCommand:
-            return create_listing_draft(
-                command=command, repository=self.listing_repository
-            )
-        raise NotImplementedError(f"No command handler for {type(command)} command")
-
-    def execute_query(self, query: Query) -> QueryResult:
-        logger.debug("Executing query %s" % type(query))
-        assert isinstance(query, Query)
-        # in the future we will certailny need something smarter than that (i.e. mediator)
-        if type(query) is GetAllListingsQuery:
-            return get_all_listings(query)
-        if type(query) is GetListingsOfSellerQuery:
-            return get_listings_of_seller(query)
-        raise NotImplementedError(f"No query handler for {type(query)} query")
-
     @staticmethod
     def create(container):
-        """Factory method for creating a module from a DI container"""
+        """Factory method for creating a module by using dependencies from a DI container"""
        return CatalogModule(
            logger=container.logger(),
            listing_repository=container.listing_repository(),
diff --git a/src/modules/catalog/tests/application/test_command_handlers.py b/src/modules/catalog/tests/application/test_command_handlers.py
index ff5a86f..411140b 100644
--- a/src/modules/catalog/tests/application/test_command_handlers.py
+++ b/src/modules/catalog/tests/application/test_command_handlers.py
@@ -1,5 +1,4 @@
-import pytest
-from modules.catalog.application.commands import (
+from modules.catalog.application.create_listing_draft.commands import (
     CreateListingDraftCommand,
     UpdateListingDraftCommand,
     PublishListingCommand,
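With handlers declared as class-level mappings, all dispatch now goes through the generic execute_query() / execute_command() methods inherited from BusinessModule. A minimal usage sketch (the constructor arguments of the query and command are illustrative, and listing_repository / seller_id are assumed to come from elsewhere, e.g. the DI container):

module = CatalogModule(listing_repository=listing_repository)

# queries are routed via the query_handlers mapping
query_result = module.execute_query(GetAllListings())
if query_result.is_ok():
    listings = query_result.result

# commands are routed via the command_handlers mapping
command_result = module.execute_command(
    CreateListingDraftCommand(
        title="red dragon", description="", price=1, seller_id=seller_id
    )
)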
diff --git a/src/seedwork/application/command_handlers.py b/src/seedwork/application/command_handlers.py
index 569c31a..42e85ce 100644
--- a/src/seedwork/application/command_handlers.py
+++ b/src/seedwork/application/command_handlers.py
@@ -1,20 +1,28 @@
-from pydantic import errors
-
-
 class CommandResult:
-    def __init__(self, **kwargs) -> None:
+    def __init__(self, result) -> None:
+        self.__result = result
         self.__errors = []
-        self.__kwargs = kwargs
 
-    def __getattr__(self, attr):
+    # commands
+    def add_error(self, message, exception=None):
+        self.__errors.append((message, exception))
+        return self
+
+    # queries
+    @property
+    def result(self):
+        """Shortcut to get_result()"""
+        return self.get_result()
+
+    def get_result(self):
+        """Gets result"""
         assert (
             not self.has_errors()
-        ), f"Cannot access '{attr}'. CommandResult has errors.\n Errors: {self.__errors}"
-        return self.__kwargs[attr]
+        ), f"Cannot access result. CommandResult has errors.\n Errors: {self.__errors}"
+        return self.__result
 
-    def add_error(self, error):
-        self.__errors.append(error)
-        return self
+    def get_errors(self):
+        return self.__errors
 
     def has_errors(self):
         return len(self.__errors) > 0
@@ -23,22 +31,22 @@
     def is_ok(self):
         return not self.has_errors()
 
     @classmethod
-    def ok(cls, **kwargs):
-        return CommandResult(**kwargs)
+    def ok(cls, result):
+        """Creates a successful result"""
+        return cls(result=result)
 
     @classmethod
-    def errors(cls, errors):
-        result = CommandResult()
-        for error in errors:
-            result.add_error(error)
+    def failed(cls, message="Failure", exception=None):
+        """Creates a failed result"""
+        result = cls(result=None)
+        result.add_error(message, exception)
         return result
 
-    @classmethod
-    def errors(cls, errors):
-        result = CommandResult()
-        for error in errors:
-            result.add_error(error)
-        return result
+
+class QueryHandler:
+    """
+    Base query handler class
+    """
 
 
 class CommandHandler:
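The reworked CommandResult trades the **kwargs payload for a single result value plus an explicit error list, with ok() and failed() as the two factory methods. A sketch of how a command handler might use it (the handler body below is illustrative, not the module's actual implementation):

def create_listing_draft(command, repository) -> CommandResult:
    try:
        # assumes Listing generates its own id and mirrors the command's fields
        listing = Listing(
            title=command.title,
            description=command.description,
            price=command.price,
            seller_id=command.seller_id,
        )
        repository.insert(listing)
        return CommandResult.ok(result=listing.id)
    except Exception as e:
        return CommandResult.failed(message="Could not create listing draft", exception=e)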
diff --git a/src/seedwork/application/modules.py b/src/seedwork/application/modules.py
index 6935989..2b64273 100644
--- a/src/seedwork/application/modules.py
+++ b/src/seedwork/application/modules.py
@@ -1,22 +1,61 @@
+from seedwork.infrastructure.logging import logger
 from .commands import Command
+from .queries import Query
 from .command_handlers import CommandResult
 
 
+def logging_handler(fn):
+    def handle(self, query_or_command, *args, **kwargs):
+        logger.debug("%s handling started" % type(query_or_command).__name__)
+        result = fn(self, query_or_command, *args, **kwargs)
+        if result.is_ok():
+            logger.debug(
+                f"{type(query_or_command).__name__} handling succeeded with result: {result.get_result()}"
+            )
+        else:
+            logger.warn(
+                f"{type(query_or_command).__name__} handling failed with errors: {result.get_errors()}"
+            )
+        return result
+
+    return handle
+
+
 class BusinessModule:
     """
     Base class for creating business modules. As a rule of thumb,
     each module should expose a minimal set of operations via an interface
     that acts as a facade between the module and an external world.
+
+    query_handlers: a mapping between Query and `lambda self, query: query_handler(query, ...)`
+    command_handlers: a mapping between Command and `lambda self, command: command_handler(command, ...)`
     """
 
+    query_handlers = {}
+    command_handlers = {}
+
     def __init__(self) -> None:
         self.setup()
 
     def setup(self):
         ...
 
+    @logging_handler
+    def execute_query(self, query: Query):
+        assert isinstance(query, Query), "Provided query must subclass Query"
+        try:
+            handler = self.query_handlers[type(query)]
+        except KeyError:
+            raise NotImplementedError(f"No query handler for {type(query)} in {type(self)}")
+
+        return handler(self, query)
+
+    @logging_handler
     def execute_command(self, command: Command) -> CommandResult:
-        raise NotImplementedError(type(command))
+        assert isinstance(command, Command), "Provided command must subclass Command"
+        try:
+            handler = self.command_handlers[type(command)]
+        except KeyError:
+            raise NotImplementedError(f"No command handler for {type(command)} in {type(self)}")
 
-    def execute_query(self, command: Command):
-        raise NotImplementedError()
+        return handler(self, command)
diff --git a/src/seedwork/application/query_handlers.py b/src/seedwork/application/query_handlers.py
index d4d1730..2376c04 100644
--- a/src/seedwork/application/query_handlers.py
+++ b/src/seedwork/application/query_handlers.py
@@ -1,20 +1,25 @@
-from pydantic import errors
-
-
 class QueryResult:
-    def __init__(self, data) -> None:
-        self.data = data
+    def __init__(self, result) -> None:
+        self.__result = result
         self.__errors = []
 
-    def __getattr__(self, attr):
+    # commands
+    def add_error(self, message, exception=None):
+        self.__errors.append((message, exception))
+        return self
+
+    # queries
+    @property
+    def result(self):
+        """Shortcut to get_result()"""
+        return self.get_result()
+
+    def get_result(self):
+        """Gets result"""
         assert (
             not self.has_errors()
-        ), f"Cannot access '{attr}'. QueryResult has errors.\n Errors: {self.__errors}"
-        return self.__kwargs[attr]
-
-    def add_error(self, error):
-        self.__errors.append(error)
-        return self
+        ), f"Cannot access result. QueryResult has errors.\n Errors: {self.__errors}"
+        return self.__result
 
     def has_errors(self):
         return len(self.__errors) > 0
@@ -23,21 +28,15 @@
     def is_ok(self):
         return not self.has_errors()
 
     @classmethod
-    def ok(cls, data):
-        return QueryResult(data)
-
-    @classmethod
-    def errors(cls, errors):
-        result = QueryResult()
-        for error in errors:
-            result.add_error(error)
-        return result
+    def ok(cls, result):
+        """Creates a successful result"""
+        return cls(result=result)
 
     @classmethod
-    def errors(cls, errors):
-        result = QueryResult()
-        for error in errors:
-            result.add_error(error)
+    def failed(cls, message="Failure", exception=None):
+        """Creates a failed result"""
+        result = cls(result=None)
+        result.add_error(message, exception)
         return result
diff --git a/src/seedwork/infrastructure/logging.py b/src/seedwork/infrastructure/logging.py
index f99188b..1bb9d60 100644
--- a/src/seedwork/infrastructure/logging.py
+++ b/src/seedwork/infrastructure/logging.py
@@ -15,7 +15,7 @@
     def __init__(self, name: str, request_context) -> None:
         self.request_context = request_context
 
     def filter(self, record):
-        record.correlation_id = self.request_context.correlation_id
+        record.correlation_id = self.request_context.correlation_id.get()
         return True
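The logging filter now reads the correlation id from a ContextVar, so every record emitted while handling one request carries the same id. The underlying mechanism, shown here with nothing but the standard library (names are illustrative; the project's real wiring lives in seedwork.infrastructure.logging):

import logging
import uuid
from contextvars import ContextVar

correlation_id: ContextVar[uuid.UUID] = ContextVar(
    "correlation_id", default=uuid.UUID(int=0)
)


class CorrelationIdFilter(logging.Filter):
    def filter(self, record):
        # ContextVar.get() returns the value set in the current context
        record.correlation_id = correlation_id.get()
        return True


logger = logging.getLogger("demo")
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(correlation_id)s %(message)s"))
handler.addFilter(CorrelationIdFilter())
logger.addHandler(handler)

correlation_id.set(uuid.uuid4())
logger.warning("all records in this context share one correlation id")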
diff --git a/src/seedwork/infrastructure/request_context.py b/src/seedwork/infrastructure/request_context.py
index b70a743..c7f711a 100644
--- a/src/seedwork/infrastructure/request_context.py
+++ b/src/seedwork/infrastructure/request_context.py
@@ -1,25 +1,66 @@
 import uuid
 from contextvars import ContextVar
+from sqlalchemy.orm import Session
+
+
+class SimpleProxy:
+    """Forwards attribute access to a wrapped target object"""
+
+    def __init__(self, target):
+        object.__setattr__(self, "target", target)
+
+    def __getattr__(self, item):
+        return getattr(self.target, item)
+
+    def __setattr__(self, key, value):
+        setattr(self.target, key, value)
 
 
 class RequestContext:
     _correlation_id: ContextVar[uuid.UUID] = ContextVar(
-        "correlation_id", default=uuid.UUID("00000000-0000-0000-0000-000000000000")
+        "_correlation_id", default=uuid.UUID("00000000-0000-0000-0000-000000000000")
     )
+    _db_session: ContextVar[Session] = ContextVar("_db_session", default=None)
+
+    def __init__(self):
+        self._engine = None
+
+    def setup(self, engine):
+        """Use this method for late initialization (via dependency injection container)"""
+        self._engine = engine
 
     @property
-    def correlation_id(self):
-        return self._correlation_id.get()
+    def correlation_id(self) -> ContextVar[uuid.UUID]:
+        """Get the current correlation id as a ContextVar"""
+        return self._correlation_id
 
-    def __enter__(self):
+    @property
+    def db_session(self) -> ContextVar[Session]:
+        """Get the current db session as a ContextVar"""
+        return self._db_session
+
+    def begin_request(self):
         self._correlation_id.set(uuid.uuid4())
+        session = Session(self._engine)
+        session.begin()
+        self._db_session.set(session)
+
+    def end_request(self, commit=True):
+        if commit:
+            self.db_session.get().commit()
+        else:
+            self.db_session.get().rollback()
+
+    def __enter__(self):
+        self.begin_request()
+        return self
 
-    def __exit__(self, *args, **kwargs):
-        ...
+    def __exit__(self, exc_type, exc_value, traceback):
+        commit = exc_type is None
+        self.end_request(commit=commit)
 
 
 """
-A global request_context object, used by logger and ....
+A global request_context object, used by the logger ....
 """
 request_context = RequestContext()
-print("rq", request_context, id(request_context))
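Taken together, RequestContext now owns both the correlation id and the per-request SQLAlchemy session, and the context-manager protocol commits on success and rolls back when an exception escapes the block. A usage sketch, assuming engine and listing already exist and that setup() has been called by the DI container:

request_context.setup(engine)

with request_context:
    # repositories resolve the session lazily through the ContextVar
    # exposed by request_context.db_session
    repo = PostgresJsonListingRepository(db_session=request_context.db_session)
    repo.insert(listing)
# leaving the block commits; an exception inside would trigger a rollback instead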