From 20220a49b90f25ed8ff175ca5294848fe17e86db Mon Sep 17 00:00:00 2001 From: jowilf Date: Thu, 11 Aug 2022 17:11:29 +0100 Subject: [PATCH] first commit --- .flake8 | 13 + .github/workflows/build-docs.yml | 21 + .github/workflows/publish.yml | 30 + .github/workflows/test.yml | 97 ++ .gitignore | 190 +++ LICENSE | 21 + README.md | 118 ++ docs/api/exceptions.md | 6 + docs/api/file.md | 2 + docs/api/processors.md | 2 + docs/api/storage-manager.md | 1 + docs/api/types.md | 2 + docs/api/validators.md | 4 + docs/index.md | 117 ++ docs/stylesheets/extra.css | 4 + docs/tutorial/quick-start.md | 146 ++ docs/tutorial/serving-files.md | 57 + docs/tutorial/setup-your-storage.md | 131 ++ docs/tutorial/using-files-in-models.md | 278 ++++ .../tutorial/quick-start/configure_storage.py | 9 + .../tutorial/quick-start/define_your_model.py | 19 + .../tutorial/quick-start/save_your_model.py | 40 + .../storage-manager/change_default_storage.py | 14 + .../storage-manager/multiple_storage.py | 23 + .../storage-manager/s3_container_example.py | 10 + .../001_filefield_example.py | 13 + .../002_imagefield_example.py | 13 + .../003_upload_storage.py | 37 + .../using-files-in-models/004_validators.py | 21 + .../using-files-in-models/005_thumbnail.py | 14 + .../006_relationships.py | 25 + .../007_multiple_file.py | 48 + examples/fastapi_app.py | 149 ++ examples/flask_app.py | 125 ++ examples/templates/index.html | 108 ++ mkdocs.yml | 63 + poetry.lock | 1369 +++++++++++++++++ pyproject.toml | 98 ++ scripts/coverage.sh | 8 + scripts/lint.sh | 9 + sqlalchemy_file/__init__.py | 5 + sqlalchemy_file/base.py | 47 + sqlalchemy_file/exceptions.py | 31 + sqlalchemy_file/file.py | 123 ++ sqlalchemy_file/helpers.py | 75 + sqlalchemy_file/mutable_list.py | 111 ++ sqlalchemy_file/processors.py | 136 ++ sqlalchemy_file/storage.py | 132 ++ sqlalchemy_file/stored_file.py | 38 + sqlalchemy_file/types.py | 330 ++++ sqlalchemy_file/validators.py | 231 +++ tests/__init__.py | 0 tests/test_content_type_validator.py | 
105 ++ tests/test_image_field.py | 99 ++ tests/test_image_validator.py | 195 +++ tests/test_metadata.py | 48 + tests/test_multiple_field.py | 362 +++++ tests/test_multiple_storage.py | 56 + tests/test_mutable_list.py | 75 + tests/test_processor.py | 76 + tests/test_result_value.py | 123 ++ tests/test_single_field.py | 308 ++++ tests/test_size_validator.py | 88 ++ tests/test_sqlmodel.py | 167 ++ tests/test_storage_manager.py | 39 + tests/utils.py | 46 + 66 files changed, 6501 insertions(+) create mode 100644 .flake8 create mode 100644 .github/workflows/build-docs.yml create mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/test.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 docs/api/exceptions.md create mode 100644 docs/api/file.md create mode 100644 docs/api/processors.md create mode 100644 docs/api/storage-manager.md create mode 100644 docs/api/types.md create mode 100644 docs/api/validators.md create mode 100644 docs/index.md create mode 100644 docs/stylesheets/extra.css create mode 100644 docs/tutorial/quick-start.md create mode 100644 docs/tutorial/serving-files.md create mode 100644 docs/tutorial/setup-your-storage.md create mode 100644 docs/tutorial/using-files-in-models.md create mode 100644 docs_src/tutorial/quick-start/configure_storage.py create mode 100644 docs_src/tutorial/quick-start/define_your_model.py create mode 100644 docs_src/tutorial/quick-start/save_your_model.py create mode 100644 docs_src/tutorial/storage-manager/change_default_storage.py create mode 100644 docs_src/tutorial/storage-manager/multiple_storage.py create mode 100644 docs_src/tutorial/storage-manager/s3_container_example.py create mode 100644 docs_src/tutorial/using-files-in-models/001_filefield_example.py create mode 100644 docs_src/tutorial/using-files-in-models/002_imagefield_example.py create mode 100644 docs_src/tutorial/using-files-in-models/003_upload_storage.py create mode 100644 
docs_src/tutorial/using-files-in-models/004_validators.py create mode 100644 docs_src/tutorial/using-files-in-models/005_thumbnail.py create mode 100644 docs_src/tutorial/using-files-in-models/006_relationships.py create mode 100644 docs_src/tutorial/using-files-in-models/007_multiple_file.py create mode 100644 examples/fastapi_app.py create mode 100644 examples/flask_app.py create mode 100644 examples/templates/index.html create mode 100644 mkdocs.yml create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 scripts/coverage.sh create mode 100755 scripts/lint.sh create mode 100644 sqlalchemy_file/__init__.py create mode 100644 sqlalchemy_file/base.py create mode 100644 sqlalchemy_file/exceptions.py create mode 100644 sqlalchemy_file/file.py create mode 100644 sqlalchemy_file/helpers.py create mode 100644 sqlalchemy_file/mutable_list.py create mode 100644 sqlalchemy_file/processors.py create mode 100644 sqlalchemy_file/storage.py create mode 100644 sqlalchemy_file/stored_file.py create mode 100644 sqlalchemy_file/types.py create mode 100644 sqlalchemy_file/validators.py create mode 100644 tests/__init__.py create mode 100644 tests/test_content_type_validator.py create mode 100644 tests/test_image_field.py create mode 100644 tests/test_image_validator.py create mode 100644 tests/test_metadata.py create mode 100644 tests/test_multiple_field.py create mode 100644 tests/test_multiple_storage.py create mode 100644 tests/test_mutable_list.py create mode 100644 tests/test_processor.py create mode 100644 tests/test_result_value.py create mode 100644 tests/test_single_field.py create mode 100644 tests/test_size_validator.py create mode 100644 tests/test_sqlmodel.py create mode 100644 tests/test_storage_manager.py create mode 100644 tests/utils.py diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..e65cf84 --- /dev/null +++ b/.flake8 @@ -0,0 +1,13 @@ +[flake8] +max-line-length = 88 +max-complexity = 10 +select = C,E,F,W,B,B950 +ignore 
= E203,E501,W503 +exclude = + .git, + __pycache__, + *.egg-info, + .nox, + .pytest_cache, + .mypy_cache + __init__.py \ No newline at end of file diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml new file mode 100644 index 0000000..69fc703 --- /dev/null +++ b/.github/workflows/build-docs.yml @@ -0,0 +1,21 @@ +name: Build docs +on: + push: + branches: ["main"] +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: 3.8 + - name: Install poetry + run: | + python -m pip install --upgrade pip + curl -sSL https://install.python-poetry.org | python3 - + - name: Configure poetry + run: poetry config virtualenvs.create false + - name: Install dependencies + run: poetry install + - run: mkdocs gh-deploy --force diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..e8de77f --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,30 @@ +name: Publish on Pypi +on: + release: + types: + - created + +jobs: + publish: + name: "Publish release" + runs-on: "ubuntu-latest" + + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: 3.8 + - name: Install poetry + run: | + python -m pip install --upgrade pip + curl -sSL https://install.python-poetry.org | python3 - + - name: Configure poetry + run: poetry config virtualenvs.create false + - name: Install dependencies + run: poetry install + - name: Publish + env: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + run: | + poetry config pypi-token.pypi $PYPI_TOKEN + poetry publish --build \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..493e4d9 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,97 @@ +name: Python package + +on: + push: + branches: ["main"] + pull_request: + branches: ["main"] + +jobs: + tests: + name: "Python ${{ 
matrix.python-version }}" + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + + services: + postgres: + image: postgres:14-alpine + env: + POSTGRES_USER: username + POSTGRES_PASSWORD: password + POSTGRES_DB: test_db + ports: + - 5432:5432 + options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + + mysql: + image: mysql:8 + env: + MYSQL_ROOT_PASSWORD: password + MYSQL_USER: username + MYSQL_PASSWORD: password + MYSQL_DATABASE: test_db + ports: + - 3306:3306 + options: --health-cmd "mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 10 + + minio: + image: minio/minio:edge-cicd + env: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + ports: + - 9000:9000 + + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install poetry + run: | + python -m pip install --upgrade pip + curl -sSL https://install.python-poetry.org | python3 - + - name: Configure poetry + run: poetry config virtualenvs.create false + - name: Install dependencies + run: poetry install + - name: Lint + run: poetry run bash scripts/lint.sh + - name: Test Local Storage provider & sqlite + env: + ENGINE: 'sqlite:///test.db?check_same_thread=False' + STORAGE_PROVIDER: 'LOCAL' + LOCAL_PATH: '/tmp/storage' + run: poetry run coverage run -m pytest tests + - name: Test Local Storage provider & postgresql + env: + ENGINE: 'postgresql+psycopg2://username:password@localhost:5432/test_db' + STORAGE_PROVIDER: 'LOCAL' + LOCAL_PATH: '/tmp/storage' + run: poetry run coverage run -m pytest tests + - name: Test Local Storage provider & mysql + env: + ENGINE: 'mysql+pymysql://username:password@localhost:3306/test_db' + STORAGE_PROVIDER: 'LOCAL' + LOCAL_PATH: '/tmp/storage' + run: poetry run coverage run -m pytest tests + - name: Test Minio Storage provider & 
sqlite memory + env: + ENGINE: 'sqlite:///:memory:?check_same_thread=False' + STORAGE_PROVIDER: 'MINIO' + MINIO_KEY: 'minioadmin' + MINIO_SECRET: 'minioadmin' + MINIO_HOST: 'localhost' + MINIO_PORT: 9000 + MINIO_SECURE: false + run: poetry run coverage run -m pytest tests + - name: Coverage Report + run: poetry run bash scripts/coverage.sh + - name: Upload coverage + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..045b0c3 --- /dev/null +++ b/.gitignore @@ -0,0 +1,190 @@ + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +pytestdebug.log + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +doc/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + 
+# pyenv +.python-version + +# pipenv +# Pipfile.lock + +# poetry +# poetry.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +*.env* +.env/ +.venv/ +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +pythonenv* + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/public +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# operating system-related files +# *.DS_Store +Thumbs.db + +# profiling data +.prof + +### VirtualEnv ### +# Virtualenv +[Bb]in +[Ii]nclude +[Ll]ib +[Ll]ib64 +[Ll]ocal +pyvenv.cfg +.venv +pip-selfcheck.json + +### VisualStudioCode ### +*.code-workspace +.vscode/* +!.vscode/tasks.json +!.vscode/launch.json +.history +.ionide + +.idea/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..6f06aa1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Jocelin Hounon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..eda507f --- /dev/null +++ b/README.md @@ -0,0 +1,118 @@ +# sqlalchemy-file + + +**SQLAlchemy-file** is a [SQLAlchemy](https://www.sqlalchemy.org/) extension for attaching files to SQLAlchemy model and +uploading them to various storage such as Amazon S3, Rackspace CloudFiles, Google Storage and others +using [Apache Libcloud](https://github.com/apache/libcloud). + +

+ + Package version + +

+ + +The key features are: + +* **Multiple Storage :** Use Object Storage API provided by [Apache Libcloud](https://github.com/apache/libcloud) to + store files. Therefore, you can store your files on Local Storage, Amazon S3, Google Cloud Storage, MinIO etc, and + easily switch between them. For a full list of supported providers + visit [supported providers page](https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html) from Apache + Libcloud documentation. +* **Validator :** Provide an interface for validating each files before saving them. +* **Size Validator :** Built-in validator for file maximum `size` validation. +* **Content-Type Validator :** Built-in validator for file ``mimetype`` restrictions. +* **Image Validator :** Built-in validator for image `mimetype`, `width`, `height` and `ratio` validation. +* **Processor :** Provide an interface to easily save multiple transformation of the original files. +* **ThumbnailGenerator :** Built-in processor to auto generate thumbnail +* **Multiple Files :** You can attach multiple files directly to a Model. +* **Session awareness :** Whenever an object is deleted or a rollback is performed the files uploaded during the unit of + work or attached to the deleted objects are automatically deleted. +* **Meant for Evolution :** Change the storage provider anytime you want, old data will continue to work +* **SQLModel Support:** Tested with [SQLModel](https://github.com/tiangolo/sqlmodel) + +--- + +**Documentation**: [https://github.com/jowilf/sqlalchemy-file](https://github.com/jowilf/sqlalchemy-file) + +**Source Code**: [https://github.com/jowilf/sqlalchemy-file](https://github.com/jowilf/sqlalchemy-file) + +--- + +## Requirements + +A recent and currently supported version of Python (right +now, Python supports versions 3.7 and +above). + +As **SQLAlchemy-file** is based on **Apache Libcloud** and **SQLAlchemy**, it requires them. 
They will be automatically +installed when you install SQLAlchemy-file. + +## Installation + +### PIP + +```shell +$ pip install sqlalchemy-file +``` + +### Poetry + +```shell +$ poetry add sqlalchemy-file +``` + +## Example + +Attaching files to models is as simple as declaring a field on the model itself + +```Python +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session + +from sqlalchemy_file import FileField, File +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +# Define your model +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add(Attachment(name="attachment1", content=open("./example.txt", "rb"))) + session.add(Attachment(name="attachment2", content=b"Hello world")) + session.add(Attachment(name="attachment3", content="Hello world")) + file = File(content="Hello World", filename="hello.txt", content_type="text/plain") + session.add(Attachment(name="attachment4", content=file)) + session.commit() + +``` + +## Related projects and inspirations + +* [Depot: ](https://github.com/amol-/depot) When I was looking for a library like this, depot was the +best I saw. 
But it supports few storage backend, doesn't support multiple files, doesn't work with +[SQLModel](https://github.com/tiangolo/sqlmodel) and doesn't provide a way to validate files. This project inspired **SQLAlchemy-file** extensively +and some features are implemented the same. diff --git a/docs/api/exceptions.md b/docs/api/exceptions.md new file mode 100644 index 0000000..aff0c08 --- /dev/null +++ b/docs/api/exceptions.md @@ -0,0 +1,6 @@ +::: sqlalchemy_file.exceptions.ValidationError +::: sqlalchemy_file.exceptions.SizeValidationError +::: sqlalchemy_file.exceptions.ContentTypeValidationError +::: sqlalchemy_file.exceptions.InvalidImageError +::: sqlalchemy_file.exceptions.DimensionValidationError +::: sqlalchemy_file.exceptions.AspectRatioValidationError \ No newline at end of file diff --git a/docs/api/file.md b/docs/api/file.md new file mode 100644 index 0000000..965681d --- /dev/null +++ b/docs/api/file.md @@ -0,0 +1,2 @@ +::: sqlalchemy_file.base.BaseFile +::: sqlalchemy_file.file.File \ No newline at end of file diff --git a/docs/api/processors.md b/docs/api/processors.md new file mode 100644 index 0000000..c71540d --- /dev/null +++ b/docs/api/processors.md @@ -0,0 +1,2 @@ +::: sqlalchemy_file.processors.Processor +::: sqlalchemy_file.processors.ThumbnailGenerator \ No newline at end of file diff --git a/docs/api/storage-manager.md b/docs/api/storage-manager.md new file mode 100644 index 0000000..52a7625 --- /dev/null +++ b/docs/api/storage-manager.md @@ -0,0 +1 @@ +::: sqlalchemy_file.storage.StorageManager diff --git a/docs/api/types.md b/docs/api/types.md new file mode 100644 index 0000000..83b376d --- /dev/null +++ b/docs/api/types.md @@ -0,0 +1,2 @@ +::: sqlalchemy_file.types.FileField +::: sqlalchemy_file.types.ImageField diff --git a/docs/api/validators.md b/docs/api/validators.md new file mode 100644 index 0000000..94f8806 --- /dev/null +++ b/docs/api/validators.md @@ -0,0 +1,4 @@ +::: sqlalchemy_file.validators.Validator +::: 
sqlalchemy_file.validators.SizeValidator +::: sqlalchemy_file.validators.ContentTypeValidator +::: sqlalchemy_file.validators.ImageValidator \ No newline at end of file diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..e862cfe --- /dev/null +++ b/docs/index.md @@ -0,0 +1,117 @@ +# Overview + +**SQLAlchemy-file** is a [SQLAlchemy](https://www.sqlalchemy.org/) extension for attaching files to SQLAlchemy model and +uploading them to various storage such as Amazon S3, Rackspace CloudFiles, Google Storage and others +using [Apache Libcloud](https://github.com/apache/libcloud). + +

+ + Package version + +

+ + +The key features are: + +* **Multiple Storage :** Use Object Storage API provided by [Apache Libcloud](https://github.com/apache/libcloud) to + store files. Therefore, you can store your files on Local Storage, Amazon S3, Google Cloud Storage, MinIO etc, and + easily switch between them. For a full list of supported providers + visit [supported providers page](https://libcloud.readthedocs.io/en/stable/storage/supported_providers.html) from Apache + Libcloud documentation. +* **Validator :** Provide an interface for validating each files before saving them. +* **Size Validator :** Built-in validator for file maximum `size` validation. +* **Content-Type Validator :** Built-in validator for file ``mimetype`` restrictions. +* **Image Validator :** Built-in validator for image `mimetype`, `width`, `height` and `ratio` validation. +* **Processor :** Provide an interface to easily save multiple transformation of the original files. +* **ThumbnailGenerator :** Built-in processor to auto generate thumbnail +* **Multiple Files :** You can attach multiple files directly to a Model. +* **Session awareness :** Whenever an object is deleted or a rollback is performed the files uploaded during the unit of + work or attached to the deleted objects are automatically deleted. +* **Meant for Evolution :** Change the storage provider anytime you want, old data will continue to work +* **SQLModel Support:** Tested with [SQLModel](https://github.com/tiangolo/sqlmodel) + +--- + +**Documentation**: [https://github.com/jowilf/sqlalchemy-file](https://github.com/jowilf/sqlalchemy-file) + +**Source Code**: [https://github.com/jowilf/sqlalchemy-file](https://github.com/jowilf/sqlalchemy-file) + +--- + +## Requirements + +A recent and currently supported version of Python (right +now, Python supports versions 3.7 and +above). + +As **SQLAlchemy-file** is based on **Apache Libcloud** and **SQLAlchemy**, it requires them. 
They will be automatically +installed when you install SQLAlchemy-file. + +## Installation + +### PIP + +```shell +$ pip install sqlalchemy-file +``` + +### Poetry + +```shell +$ poetry add sqlalchemy-file +``` + +## Example + +Attaching files to models is as simple as declaring a field on the model itself + +```Python +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session + +from sqlalchemy_file import FileField, File +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +# Define your model +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add(Attachment(name="attachment1", content=open("./example.txt", "rb"))) + session.add(Attachment(name="attachment2", content=b"Hello world")) + session.add(Attachment(name="attachment3", content="Hello world")) + file = File(content="Hello World", filename="hello.txt", content_type="text/plain") + session.add(Attachment(name="attachment4", content=file)) + session.commit() + +``` + +## Related projects and inspirations + +* [Depot: ](https://github.com/amol-/depot) When I was looking for a library like this, depot was the +best I saw. But it offers less storage backend, doesn't support multiple files and doesn't work with +[SQLModel](https://github.com/tiangolo/sqlmodel). 
This project inspired **SQLAlchemy-file** extensively +and some features are implemented the same. diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 0000000..0b1d1dd --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,4 @@ +:root > * { + --md-primary-fg-color: #0073BB; + --md-primary-fg-color--dark: #005c96; +} \ No newline at end of file diff --git a/docs/tutorial/quick-start.md b/docs/tutorial/quick-start.md new file mode 100644 index 0000000..a8a5fd4 --- /dev/null +++ b/docs/tutorial/quick-start.md @@ -0,0 +1,146 @@ +# Quick Start + +## Installation + +You can simply install **SQLAlchemy-file** from the PyPi: + +### PIP + +```shell +$ pip install sqlalchemy-file +``` + +### Poetry + +```shell +$ poetry add sqlalchemy-file +``` + +## Usage + +Getting SQLAlchemy-file setup in your code is really easy: + +* Add [FileField][sqlalchemy_file.types.FileField] Column to your SQLAlchemy Model + +!!! info + When `upload_storage` is not specified, [FileField][sqlalchemy_file.types.FileField] will use the default storage which is the first added storage + +```Python hl_lines="14 4" +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base + +from sqlalchemy_file import FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +``` + +* Configure Storage + +**SQLAlchemy-file** store files through Apache +Libcloud [Object Storage API](https://libcloud.readthedocs.io/en/stable/storage/index.html) .The `StorageManager` is the +entity in charge of configuring and handling file storages inside your application. To start uploading files, add at +least one storage. 
+ +This can be done by using [StorageManager.add_storage()][sqlalchemy_file.storage.StorageManager.add_storage] which accepts a storage name (used to identify the storage in +case of multiple storages) +and the Apache Libcloud container which will be use for this storage. + +!!! note + The first added storage will be used as default storage + +```Python hl_lines="3 23-25" +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base + +from sqlalchemy_file import FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +# Define your model +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + + +``` + +* Save your model + +You can attach ``str``, ``bytes`` or any python ``file`` object to the column + +**SQLAlchemy-file** will try to guess filename and content-type from attached file but you can use +`sqlalchemy_file.File` object to provide custom filename and content-type + +```Python hl_lines="36-38 40-41" +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session + +from sqlalchemy_file import FileField, File +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +# Define your model +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = 
Column(FileField) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add(Attachment(name="attachment1", content=open("./example.txt", "rb"))) + session.add(Attachment(name="attachment2", content=b"Hello world")) + session.add(Attachment(name="attachment3", content="Hello world")) + # Use sqlalchemy_file.File object to provide custom filename and content_type + file = File(content="Hello World", filename="hello.txt", content_type="text/plain") + session.add(Attachment(name="attachment4", content=file)) + session.commit() +``` \ No newline at end of file diff --git a/docs/tutorial/serving-files.md b/docs/tutorial/serving-files.md new file mode 100644 index 0000000..f1beb28 --- /dev/null +++ b/docs/tutorial/serving-files.md @@ -0,0 +1,57 @@ +# Serving Files + +## With FastApi +```Python +@app.get("/medias/{storage}/{file_id}", response_class=FileResponse) +def serving_files(storage: str = Path(...), file_id: str = Path(...)): + try: + file = StorageManager.get_file(f"{storage}/{file_id}") + if isinstance(file.object.driver, LocalStorageDriver): + """If file is stored in local storage, just return a + FileResponse with the fill full path.""" + return FileResponse( + file.get_cdn_url(), media_type=file.content_type, filename=file.filename + ) + elif file.get_cdn_url() is not None: + """If file has public url, redirect to this url""" + return RedirectResponse(file.get_cdn_url()) + else: + """Otherwise, return a streaming response""" + return StreamingResponse( + file.object.as_stream(), + media_type=file.content_type, + headers={"Content-Disposition": f"attachment;filename={file.filename}"}, + ) + except 
ObjectDoesNotExistError: + return JSONResponse({"detail": "Not found"}, status_code=404) +``` +See Full example [here]() + +## With Flask +```Python +@app.route("/medias//") +def serving_files(storage, file_id): + try: + file = StorageManager.get_file(f"{storage}/{file_id}") + if isinstance(file.object.driver, LocalStorageDriver): + """If file is stored in local storage, just return a + FileResponse with the fill full path.""" + return send_file( + file.get_cdn_url(), + mimetype=file.content_type, + download_name=file.filename, + ) + elif file.get_cdn_url() is not None: + """If file has public url, redirect to this url""" + return app.redirect(file.get_cdn_url()) + else: + """Otherwise, return a streaming response""" + return app.response_class( + file.object.as_stream(), + mimetype=file.content_type, + headers={"Content-Disposition": f"attachment;filename={file.filename}"}, + ) + except ObjectDoesNotExistError: + abort(404) +``` +See Full example [here]() diff --git a/docs/tutorial/setup-your-storage.md b/docs/tutorial/setup-your-storage.md new file mode 100644 index 0000000..eed14c3 --- /dev/null +++ b/docs/tutorial/setup-your-storage.md @@ -0,0 +1,131 @@ +#Setup your storage + +[StorageManager][sqlalchemy_file.storage.StorageManager] is the class which takes care of managing the whole Storage environment for the application. + +## Terminology + +**`Container:`** represents a container which can contain multiple objects. You can think of it as a folder on a file +system. Difference between container and a folder on file system is that containers cannot be nested. Some APIs and +providers (e.g. AWS) refer to it as a Bucket. + +**`Object:`** represents an object or so-called BLOB. 
(**SQLAlchemy-file** will store each file as an object) + +For more information, +follow [Apache Libcloud Documentation](https://libcloud.readthedocs.io/en/stable/storage/index.html) + +## Add Storage + +Before adding a storage, the first thing you need is to setup an apache libcloud storage container. + +=== "Local" + + ```Python + import os + from libcloud.storage.drivers.local import LocalStorageDriver + from sqlalchemy_file.storage import StorageManager + + os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) # Make sure the directory exist + my_container = LocalStorageDriver("/tmp/storage").get_container("attachment") + StorageManager.add_storage("default", container) + + ``` +=== "MinIO" + + ```Python + from libcloud.storage.types import Provider + from libcloud.storage.types import ContainerAlreadyExistsError + from libcloud.storage.providers import get_driver + + cls = get_driver(Provider.MINIO) + driver = cls("api key", "api secret key", secure=False, host="127.0.0.1", port=9000) + + try: + driver.create_container(container_name="attachment") + except ContainerAlreadyExistsError: + pass + + my_container = driver.get_container(container_name="attachment") + + ``` +=== "S3" + + ```Python + from libcloud.storage.providers import get_driver + from libcloud.storage.types import Provider + + cls = get_driver(Provider.S3) + driver = cls("api key", "api secret key") + + my_container = driver.get_container(container_name="attachment") + + ``` +For more examples, see [Apache Libcloud Storage Examples](https://libcloud.readthedocs.io/en/stable/storage/examples.html) + +Then, you can easily add your container to the storage manager + +!!! 
example + + ```Python + + from sqlalchemy_file.storage import StorageManager + + StorageManager.add_storage("default", my_container) + ``` + +## Using Multiple Storages + +Multiple storage can be used inside the same application, most common operations require the full file path, so you can +use multiple storage without risk of collisions. + +```Python +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import declarative_base +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content_first = Column(FileField(upload_storage="first")) + content_second = Column(FileField(upload_storage="second")) + + +first_container = LocalStorageDriver("./storage").get_container("first") +second_container = LocalStorageDriver("./storage").get_container("second") + +StorageManager.add_storage("first", first_container) +StorageManager.add_storage("second", second_container) + +``` + +## Switching Default Storage + +Once you started uploading files to a storage, it is best to avoid configuring another storage to the same name. Doing +that will probably break all the previously uploaded files and will cause confusion. + +If you want to switch to a different storage for saving your files just configure two storage giving the new storage an +unique name and switch the default storage using the `StorageManager.set_default()` function. 
+ +```Python +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy_file.storage import StorageManager + +first_container = LocalStorageDriver("./storage").get_container("first") +second_container = LocalStorageDriver("./storage").get_container("second") + +StorageManager.add_storage("first", first_container) +StorageManager.add_storage("second", second_container) + +assert StorageManager.get_default() == "first" + +StorageManager.set_default("second") + +assert StorageManager.get_default() == "second" + +``` \ No newline at end of file diff --git a/docs/tutorial/using-files-in-models.md b/docs/tutorial/using-files-in-models.md new file mode 100644 index 0000000..9fd4495 --- /dev/null +++ b/docs/tutorial/using-files-in-models.md @@ -0,0 +1,278 @@ +# Using files in models + +Attaching files to models is as simple as declaring a field on the model itself. + +## Fields +You can use two Column type in your model. + +### FileField + +[FileField][sqlalchemy_file.types.FileField] is the main field, that can be used in your model to accept any files. + +!!! example + ```Python + from sqlalchemy import Column, Integer, String, create_engine + from sqlalchemy.ext.declarative import declarative_base + + from sqlalchemy_file import FileField + + Base = declarative_base() + + class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + ``` +### ImageField + +Inherits all attributes and methods from [FileField][sqlalchemy_file.types.FileField], but also validates that the +uploaded file is a valid image. +!!! info + Using [ImageField][sqlalchemy_file.types.ImageField] is like + using [FileField][sqlalchemy_file.types.FileField] + with [ImageValidator][sqlalchemy_file.validators.ImageValidator] + +!!! 
example + ```Python + from sqlalchemy import Column, Integer, String + from sqlalchemy.ext.declarative import declarative_base + + from sqlalchemy_file import ImageField + + Base = declarative_base() + + + class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField) + ``` +## Uploaded Files Information +Whenever a supported object is assigned to a [FileField][sqlalchemy_file.types.FileField] or [ImageField][sqlalchemy_file.types.ImageField] +it will be converted to a [File][sqlalchemy_file.file.File] object. + +This is the same object you will get back when reloading the models from database and apart from the file itself which is accessible +through the `.file` property, it provides additional attributes described into the [File][sqlalchemy_file.file.File] documentation itself. + +## Uploading on a Specific Storage + +By default all the files are uploaded on the default storage which is the first added storage. 
This can be changed +by passing a `upload_storage` argument explicitly on field declaration: + +```Python +from libcloud.storage.providers import get_driver +from libcloud.storage.types import Provider +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() +# Amazon S3 Container +amazon_s3_container = get_driver(Provider.S3)( + "api key", "api secret key" +).get_container("example") + +# MinIO Container +min_io_container = get_driver(Provider.MINIO)( + "api key", "api secret key" +).get_container("example") + +# Configure Storage +StorageManager.add_storage("amazon_s3_storage", amazon_s3_container) +StorageManager.add_storage("min_io_storage", min_io_container) + + +class AttachmentS3(Base): + __tablename__ = "attachment_s3" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField(upload_storage="amazon_s3_storage")) + + +class AttachmentMinIO(Base): + __tablename__ = "attachment_min_io" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content_min_io = Column(FileField(upload_storage="min_io_storage")) + +``` + +## Validators + +File validators get executed just before saving the uploaded file. + +They can raise [ValidationError][sqlalchemy_file.exceptions.ValidationError] when +the uploaded files are not compliant with the validator conditions. + +Multiple validators can be chained together to validate one file. + +Validators can add additional properties to the file object. For example +[ImageValidator][sqlalchemy_file.validators.ImageValidator] add `width` and `height` to +the file object. 
+ +**SQLAlchemy-file** has built-in validators to get started, but you can create your own validator +by extending [ValidationError][sqlalchemy_file.exceptions.ValidationError] base class. + +Built-in validators: + +1. [SizeValidator][sqlalchemy_file.validators.SizeValidator] : Validate file maximum size +2. [ContentTypeValidator][sqlalchemy_file.validators.ContentTypeValidator]: Validate file mimetype +3. [ImageValidator][sqlalchemy_file.validators.ImageValidator]: Validate image + +!!! example + ```Python + from sqlalchemy import Column, Integer, String + from sqlalchemy.ext.declarative import declarative_base + + from sqlalchemy_file import FileField + from sqlalchemy_file.validators import ContentTypeValidator, SizeValidator + + Base = declarative_base() + + + class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column( + FileField( + validators=[ + SizeValidator("500k"), + ContentTypeValidator(["text/plain", "text/csv"]), + ] + ) + ) + ``` + +## Processors +File processors get executed just after saving the uploaded file. They can be use +to generate additional files and attach it to the column. For example, [ThumbnailGenerator][sqlalchemy_file.processors.ThumbnailGenerator] +generate thumbnail from original image. + +Multiple processors can be chained together. They will be executed in order. + + +Processors can add additional properties to the file object. For example +[ThumbnailGenerator][sqlalchemy_file.processors.ThumbnailGenerator] add generated +`thumbnail` file information into the file object. + +!!! 
example + ```Python + from sqlalchemy import Column, Integer, String + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy_file import ImageField + from sqlalchemy_file.processors import ThumbnailGenerator + + Base = declarative_base() + + + class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField(processors=[ThumbnailGenerator()])) + ``` + +## Multiple Files + +The best way to handle multiple files, is to use SQLAlchemy relationships + +!!! example + ```Python + from sqlalchemy import Column, ForeignKey, Integer, String + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import relationship + from sqlalchemy_file import FileField + + Base = declarative_base() + + + class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + article_id = Column(Integer, ForeignKey("article.id")) + + + class Article(Base): + __tablename__ = "article" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + + attachments = relationship(Attachment, cascade="all, delete-orphan") + ``` + +However, if you want to save multiple files directly in your model, set +`multiple=True` on field declaration: + +```Python hl_lines="18 36-45" +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session +from sqlalchemy_file import File, FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), 
unique=True) + multiple_content = Column(FileField(multiple=True)) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add( + Attachment( + name="attachment1", + multiple_content=[ + "from str", + b"from bytes", + open("./example.txt", "rb"), + File( + content="Hello World", + filename="hello.txt", + content_type="text/plain", + ), + ], + ) + ) + session.commit() +``` + +Validators and processors will be applied to each file, and the return models +is a list of [File][sqlalchemy_file.file.File] object. + +## Session Awareness + +Whenever an object is deleted or a rollback is performed the files uploaded during the unit of work or attached to +the deleted objects are automatically deleted. 
\ No newline at end of file diff --git a/docs_src/tutorial/quick-start/configure_storage.py b/docs_src/tutorial/quick-start/configure_storage.py new file mode 100644 index 0000000..99a66f7 --- /dev/null +++ b/docs_src/tutorial/quick-start/configure_storage.py @@ -0,0 +1,9 @@ +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy_file.storage import StorageManager + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) diff --git a/docs_src/tutorial/quick-start/define_your_model.py b/docs_src/tutorial/quick-start/define_your_model.py new file mode 100644 index 0000000..908e2e0 --- /dev/null +++ b/docs_src/tutorial/quick-start/define_your_model.py @@ -0,0 +1,19 @@ +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) diff --git a/docs_src/tutorial/quick-start/save_your_model.py b/docs_src/tutorial/quick-start/save_your_model.py new file mode 100644 index 0000000..c89d490 --- /dev/null +++ b/docs_src/tutorial/quick-start/save_your_model.py @@ -0,0 +1,40 @@ +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session +from sqlalchemy_file import File, FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +# Define your 
model +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") +StorageManager.add_storage("default", container) + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add(Attachment(name="attachment1", content=open("./example.txt", "rb"))) + session.add(Attachment(name="attachment2", content=b"Hello world")) + session.add(Attachment(name="attachment3", content="Hello world")) + # Use sqlalchemy_file.File object to provide custom filename and content_type + file = File(content="Hello World", filename="hello.txt", content_type="text/plain") + session.add(Attachment(name="attachment4", content=file)) + session.commit() diff --git a/docs_src/tutorial/storage-manager/change_default_storage.py b/docs_src/tutorial/storage-manager/change_default_storage.py new file mode 100644 index 0000000..6ff4c6e --- /dev/null +++ b/docs_src/tutorial/storage-manager/change_default_storage.py @@ -0,0 +1,14 @@ +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy_file.storage import StorageManager + +first_container = LocalStorageDriver("./storage").get_container("first") +second_container = LocalStorageDriver("./storage").get_container("second") + +StorageManager.add_storage("first", first_container) +StorageManager.add_storage("second", second_container) + +assert StorageManager.get_default() == "first" + +StorageManager.set_default("second") + +assert StorageManager.get_default() == "second" diff --git a/docs_src/tutorial/storage-manager/multiple_storage.py b/docs_src/tutorial/storage-manager/multiple_storage.py new file mode 
100644 index 0000000..7e2a97b --- /dev/null +++ b/docs_src/tutorial/storage-manager/multiple_storage.py @@ -0,0 +1,23 @@ +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import declarative_base +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content_first = Column(FileField(upload_storage="first")) + content_second = Column(FileField(upload_storage="second")) + + +first_container = LocalStorageDriver("./storage").get_container("first") +second_container = LocalStorageDriver("./storage").get_container("second") + +StorageManager.add_storage("first", first_container) +StorageManager.add_storage("second", second_container) diff --git a/docs_src/tutorial/storage-manager/s3_container_example.py b/docs_src/tutorial/storage-manager/s3_container_example.py new file mode 100644 index 0000000..9b2d477 --- /dev/null +++ b/docs_src/tutorial/storage-manager/s3_container_example.py @@ -0,0 +1,10 @@ +from libcloud.storage.providers import get_driver +from libcloud.storage.types import Provider +from sqlalchemy_file.storage import StorageManager + +cls = get_driver(Provider.S3) +driver = cls("api key", "api secret key") + +my_container = driver.get_container(container_name="attachment") + +StorageManager.add_storage("default", my_container) diff --git a/docs_src/tutorial/using-files-in-models/001_filefield_example.py b/docs_src/tutorial/using-files-in-models/001_filefield_example.py new file mode 100644 index 0000000..ad42788 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/001_filefield_example.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import 
FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) diff --git a/docs_src/tutorial/using-files-in-models/002_imagefield_example.py b/docs_src/tutorial/using-files-in-models/002_imagefield_example.py new file mode 100644 index 0000000..7fc82a9 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/002_imagefield_example.py @@ -0,0 +1,13 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import ImageField + +Base = declarative_base() + + +class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField) diff --git a/docs_src/tutorial/using-files-in-models/003_upload_storage.py b/docs_src/tutorial/using-files-in-models/003_upload_storage.py new file mode 100644 index 0000000..e7261f8 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/003_upload_storage.py @@ -0,0 +1,37 @@ +from libcloud.storage.providers import get_driver +from libcloud.storage.types import Provider +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() +# Amazon S3 Container +amazon_s3_container = get_driver(Provider.S3)( + "api key", "api secret key" +).get_container("example") + +# MinIO Container +min_io_container = get_driver(Provider.MINIO)( + "api key", "api secret key" +).get_container("example") + +# Configure Storage +StorageManager.add_storage("amazon_s3_storage", amazon_s3_container) +StorageManager.add_storage("min_io_storage", min_io_container) + + +class AttachmentS3(Base): + __tablename__ = "attachment_s3" + + id = Column(Integer, 
autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField(upload_storage="amazon_s3_storage")) + + +class AttachmentMinIO(Base): + __tablename__ = "attachment_min_io" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content_min_io = Column(FileField(upload_storage="min_io_storage")) diff --git a/docs_src/tutorial/using-files-in-models/004_validators.py b/docs_src/tutorial/using-files-in-models/004_validators.py new file mode 100644 index 0000000..f277cc2 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/004_validators.py @@ -0,0 +1,21 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import FileField +from sqlalchemy_file.validators import ContentTypeValidator, SizeValidator + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column( + FileField( + validators=[ + SizeValidator("500k"), + ContentTypeValidator(["text/plain", "text/csv"]), + ] + ) + ) diff --git a/docs_src/tutorial/using-files-in-models/005_thumbnail.py b/docs_src/tutorial/using-files-in-models/005_thumbnail.py new file mode 100644 index 0000000..d469390 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/005_thumbnail.py @@ -0,0 +1,14 @@ +from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy_file import ImageField +from sqlalchemy_file.processors import ThumbnailGenerator + +Base = declarative_base() + + +class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField(processors=[ThumbnailGenerator()])) diff --git 
a/docs_src/tutorial/using-files-in-models/006_relationships.py b/docs_src/tutorial/using-files-in-models/006_relationships.py new file mode 100644 index 0000000..ef4da31 --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/006_relationships.py @@ -0,0 +1,25 @@ +from sqlalchemy import Column, ForeignKey, Integer, String +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +from sqlalchemy_file import FileField + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + article_id = Column(Integer, ForeignKey("article.id")) + + +class Article(Base): + __tablename__ = "article" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + + attachments = relationship(Attachment, cascade="all, delete-orphan") diff --git a/docs_src/tutorial/using-files-in-models/007_multiple_file.py b/docs_src/tutorial/using-files-in-models/007_multiple_file.py new file mode 100644 index 0000000..7cefc9e --- /dev/null +++ b/docs_src/tutorial/using-files-in-models/007_multiple_file.py @@ -0,0 +1,48 @@ +import os + +from libcloud.storage.drivers.local import LocalStorageDriver +from sqlalchemy import Column, Integer, String, create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import Session +from sqlalchemy_file import File, FileField +from sqlalchemy_file.storage import StorageManager + +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + multiple_content = Column(FileField(multiple=True)) + + +# Configure Storage +os.makedirs("/tmp/storage/attachment", 0o777, exist_ok=True) +container = LocalStorageDriver("/tmp/storage").get_container("attachment") 
+StorageManager.add_storage("default", container) + +# Save your model +engine = create_engine( + "sqlite:///example.db", connect_args={"check_same_thread": False} +) +Base.metadata.create_all(engine) + +with Session(engine) as session: + session.add( + Attachment( + name="attachment1", + multiple_content=[ + "from str", + b"from bytes", + open("./example.txt", "rb"), + File( + content="Hello World", + filename="hello.txt", + content_type="text/plain", + ), + ], + ) + ) + session.commit() diff --git a/examples/fastapi_app.py b/examples/fastapi_app.py new file mode 100644 index 0000000..6b644b3 --- /dev/null +++ b/examples/fastapi_app.py @@ -0,0 +1,149 @@ +import os +from typing import List, Optional, Union + +import uvicorn +from fastapi import Depends, FastAPI +from fastapi import File as FormFile +from fastapi import Form, Path, UploadFile +from libcloud.storage.drivers.local import LocalStorageDriver +from libcloud.storage.providers import get_driver +from libcloud.storage.types import ( + ContainerAlreadyExistsError, + ObjectDoesNotExistError, + Provider, +) +from pydantic import BaseModel +from sqlalchemy import Column +from sqlalchemy_file import File, ImageField +from sqlalchemy_file.exceptions import ValidationError +from sqlalchemy_file.processors import ThumbnailGenerator +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.validators import SizeValidator +from sqlmodel import Field, Session, SQLModel, create_engine, select +from starlette.responses import ( + FileResponse, + JSONResponse, + RedirectResponse, + StreamingResponse, +) + +engine = create_engine("sqlite:////tmp/example.db?check_same_thread=False", echo=True) + + +os.makedirs("/tmp/storage", 0o777, exist_ok=True) +driver = get_driver(Provider.LOCAL)("/tmp/storage") + +# cls = get_driver(Provider.MINIO) +# driver = cls("minioadmin", "minioadmin", secure=False, host="127.0.0.1", port=9000) + +try: + driver.create_container(container_name="category") +except 
ContainerAlreadyExistsError: + pass + +container = driver.get_container(container_name="category") + +StorageManager.add_storage("category", container) + + +class Thumbnail(BaseModel): + path: str + url: Optional[str] + + +class FileInfo(BaseModel): + filename: str + content_type: str + path: str + url: Optional[str] + thumbnail: Thumbnail + + +class CategoryBase(SQLModel): + id: Optional[int] = Field(None, primary_key=True) + name: str = Field(None, min_length=3, max_length=100) + + +class Category(CategoryBase, table=True): + image: Union[File, UploadFile, None] = Field( + sa_column=Column( + ImageField( + upload_storage="category", + validators=[SizeValidator(max_size="1M")], + processors=[ThumbnailGenerator(thumbnail_size=(200, 200))], + ) + ) + ) + + +class CategoryOut(CategoryBase): + image: Optional[FileInfo] + + +def category_form( + name: str = Form(...), + image: Optional[UploadFile] = FormFile(None), +): + return Category(name=name, image=image) + + +app = FastAPI(title="SQLAlchemy-file Example", debug=True) + + +@app.get("/categories", response_model=List[CategoryOut]) +def get_all(): + with Session(engine) as session: + return session.execute(select(Category)).all() + + +@app.get("/categories/{id}", response_model=CategoryOut) +def get_one(id: int = Path(...)): + with Session(engine) as session: + category = session.get(Category, id) + if category is not None: + return category + return JSONResponse({"detail": "Not found"}, status_code=404) + + +@app.post("/categories", response_model=CategoryOut) +def create_new(category: Category = Depends(category_form)): + with Session(engine) as session: + try: + session.add(category) + session.commit() + session.refresh(category) + return category + except ValidationError as e: + return JSONResponse( + dict(error={"key": e.key, "msg": e.msg}), status_code=422 + ) + + +@app.get("/medias/{storage}/{file_id}", response_class=FileResponse) +def serving_files(storage: str = Path(...), file_id: str = Path(...)): + try: 
+ file = StorageManager.get_file(f"{storage}/{file_id}") + if isinstance(file.object.driver, LocalStorageDriver): + """If file is stored in local storage, just return a + FileResponse with the fill full path.""" + return FileResponse( + file.get_cdn_url(), media_type=file.content_type, filename=file.filename + ) + elif file.get_cdn_url() is not None: + """If file has public url, redirect to this url""" + return RedirectResponse(file.get_cdn_url()) + else: + """Otherwise, return a streaming response""" + return StreamingResponse( + file.object.as_stream(), + media_type=file.content_type, + headers={"Content-Disposition": f"attachment;filename={file.filename}"}, + ) + except ObjectDoesNotExistError: + return JSONResponse({"detail": "Not found"}, status_code=404) + + +if __name__ == "__main__": + SQLModel.metadata.create_all(engine) + uvicorn.run(app, port=8000) + # Navigate to http://127.0.0.1:8000/docs diff --git a/examples/flask_app.py b/examples/flask_app.py new file mode 100644 index 0000000..235ba8f --- /dev/null +++ b/examples/flask_app.py @@ -0,0 +1,125 @@ +import os + +from flask import Flask, abort, render_template, request, send_file +from flask_sqlalchemy import SQLAlchemy +from libcloud.storage.drivers.local import LocalStorageDriver +from libcloud.storage.providers import get_driver +from libcloud.storage.types import ( + ContainerAlreadyExistsError, + ObjectDoesNotExistError, + Provider, +) +from sqlalchemy_file import FileField, ImageField +from sqlalchemy_file.exceptions import ValidationError +from sqlalchemy_file.processors import ThumbnailGenerator +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.validators import ContentTypeValidator, SizeValidator + +app = Flask(__name__) +app.config[ + "SQLALCHEMY_DATABASE_URI" +] = "sqlite:////tmp/example.db?check_same_thread=False" +db = SQLAlchemy(app, engine_options={"echo": True}) + + +class Book(db.Model): + __tablename__ = "books" + isbn = db.Column(db.Integer, primary_key=True) + 
author = db.Column(db.String(100), nullable=False) + title = db.Column(db.String(100), nullable=False) + cover = db.Column( + ImageField( + upload_storage="images", + validators=[SizeValidator("16M")], + processors=[ThumbnailGenerator((50, 50))], + ) + ) + document = db.Column( + FileField( + upload_storage="documents", + validators=[ + SizeValidator("5M"), + ContentTypeValidator( + allowed_content_types=[ + "application/pdf", + "application/msword", + "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + ] + ), + ], + ) + ) + + +@app.route("/", methods=("GET", "POST")) +def index(): + error = None + if request.method == "POST": + try: + book = Book( + author=request.form["author"], + title=request.form["title"], + ) + if "cover" in request.files and request.files["cover"].filename != "": + book.cover = request.files["cover"] + if "document" in request.files and request.files["document"].filename != "": + book.document = request.files["document"] + db.session.add(book) + db.session.commit() + except ValidationError as err: + error = err + db.session.rollback() + return render_template( + "index.html", books=Book.query.all(), form=request.form, error=error + ) + + +@app.route("/medias//") +def serving_files(storage, file_id): + try: + file = StorageManager.get_file(f"{storage}/{file_id}") + if isinstance(file.object.driver, LocalStorageDriver): + """If file is stored in local storage, just return a + FileResponse with the fill full path.""" + return send_file( + file.get_cdn_url(), + mimetype=file.content_type, + download_name=file.filename, + ) + elif file.get_cdn_url() is not None: + """If file has public url, redirect to this url""" + return app.redirect(file.get_cdn_url()) + else: + """Otherwise, return a streaming response""" + return app.response_class( + file.object.as_stream(), + mimetype=file.content_type, + headers={"Content-Disposition": f"attachment;filename={file.filename}"}, + ) + except ObjectDoesNotExistError: + abort(404) + + 
+if __name__ == "__main__": + os.makedirs("/tmp/storage", 0o777, exist_ok=True) + driver = get_driver(Provider.LOCAL)("/tmp/storage") + + # cls = get_driver(Provider.MINIO) + # driver = cls("minioadmin", "minioadmin", secure=False, host="127.0.0.1", port=9000) + + try: + driver.create_container(container_name="images") + except ContainerAlreadyExistsError: + pass + try: + driver.create_container(container_name="documents") + except ContainerAlreadyExistsError: + pass + + StorageManager.add_storage("images", driver.get_container(container_name="images")) + StorageManager.add_storage( + "documents", driver.get_container(container_name="documents") + ) + + db.create_all() + app.run(debug=True) diff --git a/examples/templates/index.html b/examples/templates/index.html new file mode 100644 index 0000000..485d8eb --- /dev/null +++ b/examples/templates/index.html @@ -0,0 +1,108 @@ + + + + + + + Library + + + + + +
+
+ + + + + Library + +
+
+
+ {% if error%} + + {%endif%} +
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+
+
+ + + + + + + + + + + + {% if books | length == 0%} + + + + {%else%} + {%for book in books%} + + + + + {% if book.cover%} + + {%endif%} + {% if book.document%} + + {%endif%} + + + {%endfor%} + {%endif%} + +
ISBNTitleAuthorCoverDocument
No data found.
{{book.isbn}}{{book.title}}{{book.author}} + {{book.cover.filename}} + + {%else%} + -{{book.document.filename}} + {%else%} + -
+
+
+
+ + \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..0cfda1b --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,63 @@ +site_name: SQLAlchemy File +theme: + name: material + icon: + repo: fontawesome/solid/file-export + palette: + - media: "(prefers-color-scheme: light)" + scheme: default + toggle: + icon: material/brightness-7 + name: Switch to dark mode + + - media: "(prefers-color-scheme: dark)" + scheme: slate + toggle: + icon: material/brightness-4 + name: Switch to light mode + features: + - search.suggest + - search.highlight + - navigation.tabs + - content.tabs.link + +extra_css: + - stylesheets/extra.css + +markdown_extensions: + - admonition + - pymdownx.details + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.superfences + - pymdownx.tabbed: + alternate_style: true + +nav: + - Overview: 'index.md' + - Tutorial - User Guide: + - Quick Start: 'tutorial/quick-start.md' + - Setup your storage: 'tutorial/setup-your-storage.md' + - Using files in models: 'tutorial/using-files-in-models.md' + - Serving files: 'tutorial/serving-files.md' + - API Reference: + - StorageManager: 'api/storage-manager.md' + - File: 'api/file.md' + - Types: 'api/types.md' + - Validators: 'api/validators.md' + - Processors: 'api/processors.md' + - Exceptions: 'api/exceptions.md' + + + +plugins: + - search + - mkdocstrings: + default_handler: python + handlers: + python: + rendering: + show_root_heading: true + show_source: false + watch: + - sqlalchemy_file \ No newline at end of file diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..a5d1a00 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1369 @@ +[[package]] +name = "anyio" +version = "3.6.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = "*", markers = 
"python_version < \"3.8\""} + +[package.extras] +doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + +[[package]] +name = "apache-libcloud" +version = "3.6.0" +description = "A standard Python library that abstracts away differences among multiple cloud provider APIs. For more information and documentation, please see https://libcloud.apache.org" +category = "main" +optional = false +python-versions = ">=3.6, <4" + +[package.dependencies] +requests = ">=2.26.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "black" +version = "22.6.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "certifi" +version = "2022.6.15" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "2.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "coverage" +version = "6.4.3" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "37.0.4" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + +[[package]] +name = "fastapi" +version = "0.79.0" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0" +starlette = "0.19.1" + +[package.extras] +all = ["requests (>=2.24.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<3.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = 
["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "typer (>=0.4.1,<0.5.0)", "pyyaml (>=5.3.1,<7.0.0)"] +test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<4.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] + +[[package]] +name = "fasteners" +version = "0.17.3" +description = "A python package that provides useful locks" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "filedepot" +version = "0.8.0" +description = "Toolkit for storing files and attachments in web applications" +category = "dev" +optional = false +python-versions = "*" + +[package.extras] +testing = ["mock", "requests", "ming", "turbogears2", "boto3", "flaky", "boto", "coverage", "pillow", "webtest", "sqlalchemy"] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "flake8-bugbear" 
+version = "22.7.1" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] + +[[package]] +name = "flask" +version = "2.2.2" +description = "A simple framework for building complex web applications." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.2.2" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-sqlalchemy" +version = "2.5.1" +description = "Adds SQLAlchemy support to your Flask application." +category = "dev" +optional = false +python-versions = ">= 2.7, != 3.0.*, != 3.1.*, != 3.2.*, != 3.3.*" + +[package.dependencies] +Flask = ">=0.10" +SQLAlchemy = ">=0.8.0" + +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["wheel", "flake8", "markdown", "twine"] + +[[package]] +name = "greenlet" +version = "1.1.2" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "griffe" +version = "0.22.0" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cached-property = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +async = ["aiofiles (>=0.7,<1.0)"] + +[[package]] +name = "h11" +version = "0.13.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markdown" +version = "3.3.5" +description = "Python implementation of Markdown." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mkdocs" +version = "1.2.4" +description = "Project documentation with Markdown." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=3.3" +ghp-import = ">=1.0" +importlib-metadata = ">=3.10" +Jinja2 = ">=2.10.1" +Markdown = ">=3.2.1" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +PyYAML = ">=3.10" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.4.1" +description = "Automatically link across pages in MkDocs." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-material" +version = "8.2.7" +description = "A Material Design theme for MkDocs" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +jinja2 = ">=2.11.1,<3.1" +markdown = ">=3.2" +mkdocs = ">=1.2.3" +mkdocs-material-extensions = ">=1.0" +pygments = ">=2.10" +pymdown-extensions = ">=9.0" + +[[package]] +name = "mkdocs-material-extensions" +version = "1.0.3" +description = "Extension pack for Python Markdown." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "mkdocstrings" +version = "0.19.0" +description = "Automatic documentation from sources, for MkDocs." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.2" +mkdocs-autorefs = ">=0.3.1" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "0.7.1" +description = "A Python handler for mkdocstrings." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +griffe = ">=0.11.1" +mkdocstrings = ">=0.19" + +[[package]] +name = "mypy" +version = "0.971" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.9.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "pillow" +version = "9.2.0" +description = "Python Imaging Library (Fork)" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] +test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +testing = ["pytest-benchmark", "pytest"] +dev = ["tox", "pre-commit"] + +[[package]] +name = "psycopg2-binary" +version = "2.9.3" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pydantic" +version = "1.9.1" +description = "Data validation and settings management using python type hints" +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[package.dependencies] +typing-extensions = ">=3.7.4.3" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pygments" 
+version = "2.12.0" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pymdown-extensions" +version = "9.5" +description = "Extension pack for Python Markdown." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +markdown = ">=3.2" + +[[package]] +name = "pymysql" +version = "1.0.2" +description = "Pure Python MySQL Driver" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = {version = "*", optional = true, markers = "extra == \"rsa\""} + +[package.extras] +ed25519 = ["PyNaCl (>=1.4.0)"] +rsa = ["cryptography"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["railroad-diagrams", "jinja2"] + +[[package]] +name = "pytest" +version = "7.1.2" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-multipart" +version = "0.0.5" +description 
= "A streaming multipart parser for Python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.4.0" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sniffio" +version = "1.2.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "sqlalchemy" +version = "1.4.39" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +aiomysql = 
["greenlet (!=0.4.17)", "aiomysql"] +aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"] +mariadb_connector = ["mariadb (>=1.0.1)"] +mssql = ["pyodbc"] +mssql_pymssql = ["pymssql"] +mssql_pyodbc = ["pyodbc"] +mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"] +mysql_connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql_psycopg2binary = ["psycopg2-binary"] +postgresql_psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql (<1)", "pymysql"] +sqlcipher = ["sqlcipher3-binary"] + +[[package]] +name = "sqlalchemy2-stubs" +version = "0.0.2a24" +description = "Typing Stubs for SQLAlchemy 1.4" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +typing-extensions = ">=3.7.4" + +[[package]] +name = "sqlmodel" +version = "0.0.6" +description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." +category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0.0" + +[package.dependencies] +pydantic = ">=1.8.2,<2.0.0" +SQLAlchemy = ">=1.4.17,<1.5.0" +sqlalchemy2-stubs = "*" + +[[package]] +name = "starlette" +version = "0.19.1" +description = "The little ASGI library that shines." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["itsdangerous", "jinja2", "python-multipart", "pyyaml", "requests"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typing-extensions" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.11" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uvicorn" +version = "0.18.2" +description = "The lightning-fast ASGI server." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchfiles (>=0.13)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"] + +[[package]] +name = "watchdog" +version = "2.1.9" +description = "Filesystem events monitoring" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "werkzeug" +version = "2.2.2" +description = "The comprehensive WSGI web application library." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "zipp" +version = "3.8.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = "^3.7" +content-hash = "c20e5505f3cc9980df1a2118290516d3eb014d0aa3fca60ef0d21ab3b802e0a9" + +[metadata.files] +anyio = [ + {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, + {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, +] +apache-libcloud = [] +atomicwrites = [] +attrs = [] +black = [ + {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, + {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, + {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, + {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, + {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, + {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, + {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, + {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, + {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, + {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, + {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, + {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, + {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, + {file = 
"black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, + {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, + {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, + {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, + {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, + {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, + {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, + {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, +] +cached-property = [] +certifi = [] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file 
= "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = 
"cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = 
"cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +coverage = [] +cryptography = [] +fastapi = [] +fasteners = [] +filedepot = [ + {file = "filedepot-0.8.0.tar.gz", hash = "sha256:25316ecd352e16524b4d321dbad08e9beb563d5de5447f3ed312daec5d011849"}, +] +flake8 = [] 
+flake8-bugbear = [] +flask = [] +flask-sqlalchemy = [] +ghp-import = [] +greenlet = [ + {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, + {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, + {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, + {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, + {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, + {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, + {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, + {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, + {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, + {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, + {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, + {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, + {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, + {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, + {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, + {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, + {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = 
"sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, + {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, + {file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, + {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, + {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, + {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, + {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, + {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, + {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, + {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, + {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, + {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, + {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, + {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, + {file = 
"greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, + {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, + {file = "greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, + {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, +] +griffe = [] +h11 = [ + {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, + {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-metadata = [] +iniconfig = [] +isort = [] +itsdangerous = [] +jinja2 = [] +markdown = [] +markupsafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + 
{file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + 
{file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = 
"MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +mccabe = [] +mergedeep = [] +mkdocs = [] +mkdocs-autorefs = [] +mkdocs-material = [] +mkdocs-material-extensions = [] +mkdocstrings = [] +mkdocstrings-python = [] +mypy = [] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +packaging = [] +pathspec = [ + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + 
{file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, +] +pillow = [] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [] +psycopg2-binary = [ + {file = "psycopg2-binary-2.9.3.tar.gz", hash = "sha256:761df5313dc15da1502b21453642d7599d26be88bff659382f8f9747c7ebea4e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:539b28661b71da7c0e428692438efbcd048ca21ea81af618d845e06ebfd29478"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e82d38390a03da28c7985b394ec3f56873174e2c88130e6966cb1c946508e65"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57804fc02ca3ce0dbfbef35c4b3a4a774da66d66ea20f4bda601294ad2ea6092"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:083a55275f09a62b8ca4902dd11f4b33075b743cf0d360419e2051a8a5d5ff76"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-manylinux_2_24_ppc64le.whl", hash = "sha256:0a29729145aaaf1ad8bafe663131890e2111f13416b60e460dae0a96af5905c9"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a79d622f5206d695d7824cbf609a4f5b88ea6d6dab5f7c147fc6d333a8787e4"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:090f3348c0ab2cceb6dfbe6bf721ef61262ddf518cd6cc6ecc7d334996d64efa"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a9e1f75f96ea388fbcef36c70640c4efbe4650658f3d6a2967b4cc70e907352e"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:c3ae8e75eb7160851e59adc77b3a19a976e50622e44fd4fd47b8b18208189d42"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win32.whl", hash = "sha256:7b1e9b80afca7b7a386ef087db614faebbf8839b7f4db5eb107d0f1a53225029"}, + {file = "psycopg2_binary-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:8b344adbb9a862de0c635f4f0425b7958bf5a4b927c8594e6e8d261775796d53"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:e847774f8ffd5b398a75bc1c18fbb56564cda3d629fe68fd81971fece2d3c67e"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68641a34023d306be959101b345732360fc2ea4938982309b786f7be1b43a4a1"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3303f8807f342641851578ee7ed1f3efc9802d00a6f83c101d21c608cb864460"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_aarch64.whl", hash = "sha256:e3699852e22aa68c10de06524a3721ade969abf382da95884e6a10ff798f9281"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-manylinux_2_24_ppc64le.whl", hash = "sha256:526ea0378246d9b080148f2d6681229f4b5964543c170dd10bf4faaab6e0d27f"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:b1c8068513f5b158cf7e29c43a77eb34b407db29aca749d3eb9293ee0d3103ca"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:15803fa813ea05bef089fa78835118b5434204f3a17cb9f1e5dbfd0b9deea5af"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:152f09f57417b831418304c7f30d727dc83a12761627bb826951692cc6491e57"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:404224e5fef3b193f892abdbf8961ce20e0b6642886cfe1fe1923f41aaa75c9d"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win32.whl", hash = 
"sha256:1f6b813106a3abdf7b03640d36e24669234120c72e91d5cbaeb87c5f7c36c65b"}, + {file = "psycopg2_binary-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:2d872e3c9d5d075a2e104540965a1cf898b52274a5923936e5bfddb58c59c7c2"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:10bb90fb4d523a2aa67773d4ff2b833ec00857f5912bafcfd5f5414e45280fb1"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a52ecab70af13e899f7847b3e074eeb16ebac5615665db33bce8a1009cf33"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a29b3ca4ec9defec6d42bf5feb36bb5817ba3c0230dd83b4edf4bf02684cd0ae"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:12b11322ea00ad8db8c46f18b7dfc47ae215e4df55b46c67a94b4effbaec7094"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-manylinux_2_24_ppc64le.whl", hash = "sha256:53293533fcbb94c202b7c800a12c873cfe24599656b341f56e71dd2b557be063"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c381bda330ddf2fccbafab789d83ebc6c53db126e4383e73794c74eedce855ef"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d29409b625a143649d03d0fd7b57e4b92e0ecad9726ba682244b73be91d2fdb"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:183a517a3a63503f70f808b58bfbf962f23d73b6dccddae5aa56152ef2bcb232"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c4e4cfa45f5a60599d9cec5f46cd7b1b29d86a6390ec23e8eebaae84e64554"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win32.whl", hash = "sha256:adf20d9a67e0b6393eac162eb81fb10bc9130a80540f4df7e7355c2dd4af9fba"}, + {file = "psycopg2_binary-2.9.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:2f9ffd643bc7349eeb664eba8864d9e01f057880f510e4681ba40a6532f93c71"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", hash = "sha256:def68d7c21984b0f8218e8a15d514f714d96904265164f75f8d3a70f9c295667"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dffc08ca91c9ac09008870c9eb77b00a46b3378719584059c034b8945e26b272"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:280b0bb5cbfe8039205c7981cceb006156a675362a00fe29b16fbc264e242834"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:af9813db73395fb1fc211bac696faea4ca9ef53f32dc0cfa27e4e7cf766dcf24"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-manylinux_2_24_ppc64le.whl", hash = "sha256:63638d875be8c2784cfc952c9ac34e2b50e43f9f0a0660b65e2a87d656b3116c"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ffb7a888a047696e7f8240d649b43fb3644f14f0ee229077e7f6b9f9081635bd"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c9d5450c566c80c396b7402895c4369a410cab5a82707b11aee1e624da7d004"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:d1c1b569ecafe3a69380a94e6ae09a4789bbb23666f3d3a08d06bbd2451f5ef1"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8fc53f9af09426a61db9ba357865c77f26076d48669f2e1bb24d85a22fb52307"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win32.whl", hash = "sha256:6472a178e291b59e7f16ab49ec8b4f3bdada0a879c68d3817ff0963e722a82ce"}, + {file = "psycopg2_binary-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:35168209c9d51b145e459e05c31a9eaeffa9a6b0fd61689b48e07464ffd1a83e"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-macosx_10_14_x86_64.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl", 
hash = "sha256:47133f3f872faf28c1e87d4357220e809dfd3fa7c64295a4a148bcd1e6e34ec9"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91920527dea30175cc02a1099f331aa8c1ba39bf8b7762b7b56cbf54bc5cce42"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887dd9aac71765ac0d0bac1d0d4b4f2c99d5f5c1382d8b770404f0f3d0ce8a39"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:1f14c8b0942714eb3c74e1e71700cbbcb415acbc311c730370e70c578a44a25c"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-manylinux_2_24_ppc64le.whl", hash = "sha256:7af0dd86ddb2f8af5da57a976d27cd2cd15510518d582b478fbb2292428710b4"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93cd1967a18aa0edd4b95b1dfd554cf15af657cb606280996d393dadc88c3c35"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bda845b664bb6c91446ca9609fc69f7db6c334ec5e4adc87571c34e4f47b7ddb"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:01310cf4cf26db9aea5158c217caa92d291f0500051a6469ac52166e1a16f5b7"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:99485cab9ba0fa9b84f1f9e1fef106f44a46ef6afdeec8885e0b88d0772b49e8"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win32.whl", hash = "sha256:46f0e0a6b5fa5851bbd9ab1bc805eef362d3a230fbdfbc209f4a236d0a7a990d"}, + {file = "psycopg2_binary-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:accfe7e982411da3178ec690baaceaad3c278652998b2c45828aaac66cd8285f"}, +] +py = [] +pycodestyle = [] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pydantic = [ + {file = 
"pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = 
"pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, +] +pyflakes = [] +pygments = [] +pymdown-extensions = [] +pymysql = [ + {file = "PyMySQL-1.0.2-py3-none-any.whl", hash = "sha256:41fc3a0c5013d5f039639442321185532e3e2c8924687abe6537de157d403641"}, + {file = "PyMySQL-1.0.2.tar.gz", hash = "sha256:816927a350f38d56072aeca5dfb10221fe1dc653745853d30a216637f5d7ad36"}, +] +pyparsing = [] +pytest = [] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", 
hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-multipart = [ + {file = "python-multipart-0.0.5.tar.gz", hash = "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43"}, +] +pyyaml = [] +pyyaml-env-tag = [] +requests = [] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sniffio = [ + {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, + {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, +] +sqlalchemy = [] +sqlalchemy2-stubs = [ + {file = "sqlalchemy2-stubs-0.0.2a24.tar.gz", hash = "sha256:e15c45302eafe196ed516f979ef017135fd619d2c62d02de9a5c5f2e59a600c4"}, + {file = "sqlalchemy2_stubs-0.0.2a24-py3-none-any.whl", hash = "sha256:f2399251d3d8f00a88659d711a449c855a0d4e977c7a9134e414f1459b9acc11"}, +] +sqlmodel = [ + {file = "sqlmodel-0.0.6-py3-none-any.whl", hash = "sha256:c5fd8719e09da348cd32ce2a5b6a44f289d3029fa8f1c9818229b6f34f1201b4"}, + {file = "sqlmodel-0.0.6.tar.gz", hash = "sha256:3b4f966b9671b24d85529d274e6c4dbc7753b468e35d2d6a40bd75cad1f66813"}, +] +starlette = [] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typed-ast = [] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +urllib3 = [] +uvicorn = [] +watchdog = [] 
+werkzeug = [] +zipp = [] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..266ebdc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,98 @@ +[tool.poetry] +name = "sqlalchemy-file" +version = "0.1.1" +description = """SQLAlchemy-file is a SQLAlchemy extension for attaching files + to SQLAlchemy model and uploading them to various storage such as Amazon S3, + Rackspace CloudFiles, Google Storage and others using Apache Libcloud.""" +authors = ["Jocelin Hounon "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/jowilf/sqlalchemy-file" +repository = "https://github.com/jowilf/sqlalchemy-file" +classifiers = [ + 'Development Status :: 4 - Beta', + "Framework :: AsyncIO", + 'Environment :: Web Environment', + 'Intended Audience :: Developers', + "License :: OSI Approved :: MIT License", + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + "Topic :: Database", + "Topic :: Database :: Database Engines/Servers", +] + +[tool.poetry.dependencies] +python = "^3.7" +SQLAlchemy = ">=1.4,<1.5.0" +apache-libcloud = "^3.6.0" + +[tool.poetry.dev-dependencies] +filedepot = "^0.8.0" +pytest = "^7.1.2" +sqlmodel = "^0.0.6" +Pillow = "^9.2.0" +fasteners = "^0.17.3" +black = "^22.6.0" +coverage = { extras = ["toml"], version = "^6.4.2" } +flake8 = "^5.0.4" +flake8-bugbear = "^22.7.1" +mypy = "^0.971" +isort = "^5.10.1" +mkdocs-material = "^8.2.7" +PyMySQL = { extras = ["rsa"], version = "^1.0.2" } +psycopg2-binary = "^2.9.3" +mkdocstrings = { extras = ["python"], version = "^0.19.0" } +fastapi = "^0.79.0" +uvicorn = "^0.18.2" +python-multipart = "^0.0.5" +Flask = "^2.2.2" +Flask-SQLAlchemy = "^2.5.1" + +[tool.coverage.report] +fail_under 
= 95 +show_missing = true +exclude_lines = [ + "pragma: no cover", + "if TYPE_CHECKING:", +] + +[tool.coverage.run] +parallel = true +command_line = "-m pytest" +source = ["sqlalchemy_file", "tests"] + +[tool.isort] +profile = "black" +known_third_party = ["sqlalchemy_file"] +skip_glob = [ + "sqlalchemy_file/__init__.py", +] +src_paths = ["sqlalchemy_file", "tests"] + +[tool.mypy] +disallow_any_generics = true +disallow_subclassing_any = true +disallow_untyped_calls = false +disallow_untyped_defs = true +disallow_incomplete_defs = true +check_untyped_defs = true +disallow_untyped_decorators = true +no_implicit_optional = true +warn_redundant_casts = true +warn_unused_ignores = true +warn_return_any = true +implicit_reexport = false +strict_equality = true +ignore_missing_imports = true +exclude = 'tests/' + + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/scripts/coverage.sh b/scripts/coverage.sh new file mode 100644 index 0000000..1382949 --- /dev/null +++ b/scripts/coverage.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -e +set -x + +coverage combine +coverage report --show-missing +coverage xml \ No newline at end of file diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 0000000..458a214 --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +set -e +set -x + +mypy sqlalchemy_file +flake8 sqlalchemy_file tests docs_src +black sqlalchemy_file tests docs_src --check +isort sqlalchemy_file tests docs_src --check-only \ No newline at end of file diff --git a/sqlalchemy_file/__init__.py b/sqlalchemy_file/__init__.py new file mode 100644 index 0000000..72d4672 --- /dev/null +++ b/sqlalchemy_file/__init__.py @@ -0,0 +1,5 @@ +__version__ = "0.1.0" + +from .file import File as File +from .types import FileField as FileField +from .types import ImageField as ImageField diff --git a/sqlalchemy_file/base.py b/sqlalchemy_file/base.py new file mode 100644 index 
class BaseFile(typing.Dict[str, Any]):
    """
    Base class for file objects. It keeps information on a content related
    to a specific storage.

    It is a specialized dictionary that also provides attribute-style access
    (``file.filename`` == ``file["filename"]``); the dict parent permits easy
    encoding/decoding to JSON. Once :meth:`_freeze` has been called, every
    mutation attempt raises ``TypeError`` until :meth:`_thaw` is called.
    """

    def __getitem__(self, key: str) -> Any:
        return dict.__getitem__(self, key)

    def __getattr__(self, name: str) -> Any:
        # Attribute access falls back to the mapping; missing keys surface
        # as AttributeError so hasattr()/getattr() behave as expected.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setitem__(self, key: str, value: Any) -> None:
        if getattr(self, "_frozen", False):
            raise TypeError("Already saved files are immutable")
        return dict.__setitem__(self, key, value)

    # Attribute assignment is simply item assignment (same frozen guard).
    __setattr__ = __setitem__

    def __delattr__(self, name: str) -> None:
        if getattr(self, "_frozen", False):
            raise TypeError("Already saved files are immutable")

        try:
            del self[name]
        except KeyError:
            raise AttributeError(name)

    def __delitem__(self, key: str) -> None:
        # BUGFIX: this previously used object.__getattribute__(self, "_frozen"),
        # which raises AttributeError when _freeze()/_thaw() was never called
        # (object.__getattribute__ does not fall through to __getattr__).
        # Use the same guarded check as __setitem__/__delattr__ for consistency.
        if getattr(self, "_frozen", False):
            raise TypeError("Already saved files are immutable")
        dict.__delitem__(self, key)

    def _freeze(self) -> None:
        # Stored via object.__setattr__ so the flag lives on the instance
        # __dict__ (not inside the mapping) and bypasses the frozen guard.
        object.__setattr__(self, "_frozen", True)

    def _thaw(self) -> None:
        object.__setattr__(self, "_frozen", False)
class File(BaseFile):
    """
    Takes a file as content and uploads it to the appropriate storage
    according to the attached Column, and stores the file information into
    the database as JSON.

    Default attributes provided for all ``File`` include:

    Attributes:

        filename (str): This is the name of the uploaded file
        file_id: This is the generated UUID for the uploaded file
        path: This is a combination of `upload_storage` and `file_id` separated by
            `/`. This will be used later to retrieve the file
        content_type: This is the content type of the uploaded file
        uploaded_at (datetime): This is the upload date in ISO format
        url (str): CDN url of the uploaded file
        file: Only available for saved content, internally calls
            [StorageManager.get_file()][sqlalchemy_file.storage.StorageManager.get_file]
            on path and returns an instance of `StoredFile`
    """

    def __init__(
        self,
        content: Any,
        filename: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        super().__init__()
        if isinstance(content, dict):
            # Decoding path: `content` is the JSON mapping loaded from the
            # database, so the file is already stored and must be immutable.
            # NOTE(review): "saved" is set here as an instance *attribute*
            # while the `file` property reads the "saved" *key*; decoded DB
            # content appears to always carry the key (set by
            # save_to_storage before encoding) — confirm against types.py.
            object.__setattr__(self, "original_content", None)
            object.__setattr__(self, "saved", True)
            self.update(content)
            self._freeze()
        else:
            # Fresh-upload path: normalize the incoming object (str/bytes/
            # file-like) into a real file object and derive metadata from it.
            self.original_content = get_content_from_file_obj(content)
            filename = filename or get_filename_from_fileob(content)
            content_type = content_type or get_content_type_from_fileobj(
                content, filename
            )
            self.update(
                {
                    "filename": filename,
                    "content_type": content_type,
                    "size": get_content_size_from_fileobj(self.original_content),
                    # "files" collects the storage paths of every stored
                    # object (original + any processor-generated extras).
                    "files": [],
                }
            )
            self._thaw()

    def apply_validators(self, validators: List[Validator], key: str = "") -> None:
        """Apply validators to current file.

        Parameters:
            validators: validators to run, in order; each may raise a
                ValidationError subclass
            key: the column key, forwarded to validators for error messages
        """
        for validator in validators:
            validator.process(self, key)

    def apply_processors(
        self,
        processors: List[Processor],
        upload_storage: Optional[str] = None,
    ) -> None:
        """Apply processors to current file, then freeze it.

        Processors run after the original content has been stored and may
        attach additional files (e.g. thumbnails) via `store_content`.
        """
        for processor in processors:
            processor.process(self, upload_storage)
        self._freeze()

    def save_to_storage(self, upload_storage: Optional[str] = None) -> None:
        """Save current file into provided `upload_storage` and record the
        resulting id/path/url/timestamp in the mapping."""
        stored_file = self.store_content(
            self.original_content,
            upload_storage,
            metadata={"filename": self.filename, "content_type": self.content_type},
        )
        self["file_id"] = stored_file.name
        self["uploaded_at"] = datetime.utcnow().isoformat()
        # NOTE(review): when upload_storage is None the recorded path becomes
        # "None/<file_id>", which StorageManager.get_file cannot resolve;
        # callers appear to always pass an explicit storage name — confirm.
        self["path"] = "%s/%s" % (upload_storage, stored_file.name)
        self["url"] = stored_file.get_cdn_url()
        self["saved"] = True

    def store_content(
        self,
        content: Any,
        upload_storage: Optional[str] = None,
        name: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> StoredFile:
        """Store content into provided `upload_storage`
        with additional `metadata`. Can be used by processors
        to store additional files.

        A random UUID is generated when `name` is not given; every stored
        path is appended to self["files"] so later cleanup can find it.
        """
        name = name or str(uuid.uuid4())
        stored_file = StorageManager.save_file(name, content, upload_storage, metadata)
        self["files"].append("%s/%s" % (upload_storage, name))
        return stored_file

    def encode(self) -> Dict[str, Any]:
        # JSON-serializable view: drop the raw file object, keep metadata.
        return {k: v for k, v in self.items() if k not in ["original_content"]}

    @classmethod
    def decode(cls, data: Any) -> "File":
        # Rebuild a (frozen) File from the mapping stored in the database.
        return cls(data)

    @property
    def file(self) -> "StoredFile":
        # Lazily fetch the stored object; only meaningful once saved.
        if self.get("saved", False):
            return StorageManager.get_file(self["path"])
        raise RuntimeError("Only available for saved file")
+ """ + if isinstance(fileobj, (str, bytes)): + f = SpooledTemporaryFile(INMEMORY_FILESIZE) + f.write(fileobj.encode() if isinstance(fileobj, str) else fileobj) + f.seek(0) + return f + elif getattr(fileobj, "file", None) is not None: + return fileobj.file + return fileobj + + +def get_filename_from_fileob(fileobj): # type: ignore + if getattr(fileobj, "filename", None) is not None: + return fileobj.filename + elif getattr(fileobj, "name", None) is not None: + return os.path.basename(fileobj.name) + return "unnamed" + + +def get_content_type_from_fileobj(fileobj, filename): # type: ignore + if getattr(fileobj, "content_type", None) is not None: + return fileobj.content_type + return mimetypes.guess_type(filename, strict=False)[0] or "application/octet-stream" + + +def get_content_size_from_fileobj(file): # type: ignore + if hasattr(file, "size"): + return file.size + if hasattr(file, "name"): + try: + return os.path.getsize(file.name) + except (OSError, TypeError): + pass + if hasattr(file, "tell") and hasattr(file, "seek"): + pos = file.tell() + file.seek(0, os.SEEK_END) + size = file.tell() + file.seek(pos) + return size + raise RuntimeError("Unable to determine the file's size.") # pragma: no cover + + +def convert_size(size: Union[str, int]) -> int: + # convert size to number of bytes ex: 1k -> 1000; 1Ki->1024 + if isinstance(size, int): + return size + elif isinstance(size, str): + pattern = re.compile(r"^(\d+)\s*(k|([KM]i?))$") + m = re.fullmatch(pattern, size) + if m is None: + raise ValueError("Invalid size %s" % size) + value, si, _ = m.groups() + si_map = {"k": 1000, "K": 1000, "M": 1000**2, "Ki": 1024, "Mi": 1024**2} + return int(value) * si_map[si] diff --git a/sqlalchemy_file/mutable_list.py b/sqlalchemy_file/mutable_list.py new file mode 100644 index 0000000..a21ca8f --- /dev/null +++ b/sqlalchemy_file/mutable_list.py @@ -0,0 +1,111 @@ +import typing +from typing import Any, List, Tuple, TypeVar, no_type_check + +from sqlalchemy.ext.mutable import 
class MutableList(Mutable, typing.List[T]):
    """
    A list type that implements :class:`Mutable`.

    The :class:`MutableList` object implements a list that will
    emit change events to the underlying mapping when the contents of
    the list are altered, including when values are added or removed.

    This is a replication of the default MutableList provided by SQLAlchemy.
    The difference here is the property ``_removed``, which keeps every element
    removed from the list in order to be able to delete them after commit
    and keep them when the session is rolled back.
    """

    def __init__(self, *args, **kwargs) -> None:  # type: ignore
        super(MutableList, self).__init__(*args, **kwargs)
        # Elements removed from the list; consulted after commit/rollback.
        self._removed: List[T] = []

    @classmethod
    def coerce(cls, key: Any, value: Any) -> Any:
        # Wrap plain lists assigned to the attribute in a MutableList so
        # subsequent in-place mutations are tracked.
        if not isinstance(value, MutableList):
            if isinstance(value, list):
                return MutableList(value)
            # this call will raise ValueError
            return Mutable.coerce(key, value)  # pragma: no cover
        else:
            return value  # pragma: no cover

    @no_type_check
    def __reduce_ex__(self, proto):  # pragma: no cover
        return self.__class__, (list(self),)

    # needed for backwards compatibility with
    # older pickles
    def __getstate__(self) -> Tuple[List[T], List[T]]:  # pragma: no cover
        return list(self), self._removed

    def __setstate__(self, state: Any) -> None:  # pragma: no cover
        self[:] = state[0]
        self._removed = state[1]

    def __setitem__(self, index: Any, value: Any) -> None:
        """Detect list set events and emit change events."""
        # Remember the overwritten element(s) so their stored files can be
        # cleaned up later; slices may replace several at once.
        if isinstance(index, slice):
            old_value = self[index]
        else:
            old_value = [self[index]]
        list.__setitem__(self, index, value)
        self.changed()
        self._removed.extend(old_value)

    def __delitem__(self, index: Any) -> None:
        """Detect list del events and emit change events."""
        if isinstance(index, slice):
            old_value = self[index]
        else:
            old_value = [self[index]]
        list.__delitem__(self, index)
        self.changed()
        self._removed.extend(old_value)

    def pop(self, *arg) -> "T":  # type: ignore
        # Popped elements are also tracked for post-commit cleanup.
        result = list.pop(self, *arg)
        self.changed()
        self._removed.append(result)
        return result

    def append(self, x: Any) -> None:
        list.append(self, x)
        self.changed()

    def extend(self, x: Any) -> None:
        list.extend(self, x)
        self.changed()

    @no_type_check
    def __iadd__(self, x):
        self.extend(x)
        return self

    def insert(self, i: Any, x: Any) -> None:
        list.insert(self, i, x)
        self.changed()

    def remove(self, i: "T") -> None:
        list.remove(self, i)
        self._removed.append(i)
        self.changed()

    def clear(self) -> None:
        # Everything currently in the list becomes "removed".
        self._removed.extend(self)
        list.clear(self)  # type: ignore
        self.changed()

    def sort(self, **kw: Any) -> None:
        list.sort(self, **kw)
        self.changed()

    def reverse(self) -> None:
        list.reverse(self)  # type: ignore
        self.changed()
+ + """ + + @abstractmethod + def process( + self, file: "File", upload_storage: Optional[str] = None + ) -> None: # pragma: no cover + """ + Should be overridden in inherited class + Parameters: + file: [File][sqlalchemy_file.file.File] object, + Use file.original_content to access uploaded file + upload_storage: pass this to + [file.store_content()][sqlalchemy_file.file.File.store_content] + to attach additional files to the original file + """ + pass + + +class ThumbnailGenerator(Processor): + """ + Generate thumbnail from original content. + + The default thumbnail format and size are `PNG@128x128`, those can be changed + by giving custom `thumbnail_size` and `thumbnail_format` + + !!! note + ThumbnailGenerator will add additional data + to the file object under the key `thumbnail`. + These data will be store in database. + + Properties available in `thumbnail` attribute + + - **file_id:** This is the ID of the uploaded thumbnail file + - **path:** This is a upload_storage/file_id path which can + be used with :meth:`StorageManager.get_file` to + retrieve the thumbnail file + - **width** This is the width of the thumbnail image + - **height:** This is the height of the thumbnail image + - **url:** Public url of the uploaded file provided + by libcloud method `Object.get_cdn_url()` + + Example: + ```Python + class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField(processors=[ThumbnailGenerator()])) + ``` + + ```Python + def test_create_image_with_thumbnail(self, fake_image) -> None: + with Session(engine) as session: + from PIL import Image + + session.add(Book(title="Pointless Meetings", cover=fake_image)) + session.flush() + book = session.execute( + select(Book).where(Book.title == "Pointless Meetings") + ).scalar_one() + assert book.cover["thumbnail"] is not None + thumbnail = StorageManager.get_file(book.cover["thumbnail"]["path"]) + 
assert thumbnail is not None + thumbnail = Image.open(thumbnail) + assert max(thumbnail.width, thumbnail.height) == 128 + assert book.cover["thumbnail"]["width"] == thumbnail.width + assert book.cover["thumbnail"]["height"] == thumbnail.height + ``` + + + """ + + def __init__( + self, + thumbnail_size: Tuple[int, int] = (128, 128), + thumbnail_format: str = "PNG", + ) -> None: + super().__init__() + self.thumbnail_size = thumbnail_size + self.thumbnail_format = thumbnail_format + + def process(self, file: "File", upload_storage: Optional[str] = None) -> None: + from PIL import Image + + content = file.original_content + img = Image.open(content) + thumbnail = img.copy() + thumbnail.thumbnail(self.thumbnail_size) + output = SpooledTemporaryFile(INMEMORY_FILESIZE) + thumbnail.save(output, self.thumbnail_format) + output.seek(0) + width, height, content_type = ( + thumbnail.width, + thumbnail.height, + f"image/{self.thumbnail_format}".lower(), + ) + ext = mimetypes.guess_extension(content_type) + stored_file = file.store_content( + output, + upload_storage, + metadata={ + "filename": file["filename"] + f".thumbnail{width}x{height}{ext}", + "content_type": content_type, + "width": width, + "height": height, + }, + ) + file.update( + { + "thumbnail": { + "file_id": stored_file.name, + "width": width, + "height": height, + "path": "%s/%s" % (upload_storage, stored_file.name), + "url": stored_file.get_cdn_url(), + } + } + ) diff --git a/sqlalchemy_file/storage.py b/sqlalchemy_file/storage.py new file mode 100644 index 0000000..a4fecac --- /dev/null +++ b/sqlalchemy_file/storage.py @@ -0,0 +1,132 @@ +from typing import Any, Dict, Iterator, Optional + +from libcloud.storage.base import Container +from libcloud.storage.drivers.local import LocalStorageDriver +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy_file.helpers import get_metadata_file_obj +from sqlalchemy_file.stored_file import StoredFile + + +class StorageManager: + """ + Takes care of 
managing the whole Storage environment for the application. + + Use [add_storage][sqlalchemy_file.storage.StorageManager.add_storage] method + to add new `libcloud.storage.base.Container`and associate a name which + will be use later to retrieve this container. + + The first container will be used as default, to simplify code when you have + only one container. + + Use associated name as `upload_storage` for [FileField][sqlalchemy_file.types.FileField] + to store his files inside the corresponding container. + + """ + + _default_storage_name: Optional[str] = None + _storages: Dict[str, Container] = {} + + @classmethod + def set_default(cls, name: str) -> None: + """Replaces the current application default storage""" + if name not in cls._storages: + raise RuntimeError("%s storage has not been added" % (name,)) + cls._default_storage_name = name + + @classmethod + def get_default(cls) -> str: + """Gets the current application default storage""" + if cls._default_storage_name is None: + raise RuntimeError("No default storage has been added") + return cls._default_storage_name + + @classmethod + def add_storage(cls, name: str, container: Container) -> None: + """Add new storage""" + assert isinstance(container, Container), "Invalid container" + if name in cls._storages: + raise RuntimeError("Storage %s has already been added" % (name,)) + if cls._default_storage_name is None: + cls._default_storage_name = name + cls._storages[name] = container + + @classmethod + def get(cls, name: Optional[str] = None) -> Container: + """ + Gets the container instance associate to the name, + return default if name isn't provided + """ + if name is None and cls._default_storage_name is None: + raise RuntimeError("No default storage have been added") + elif name is None: + name = cls._default_storage_name + if name in cls._storages: + return cls._storages[name] + raise RuntimeError("%s storage has not been added" % (name,)) + + @classmethod + def save_file( + cls, + name: str, + 
content: Iterator[bytes], + upload_storage: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, str]] = None, + ) -> StoredFile: + """Save file into provided `upload_storage`""" + container = cls.get(upload_storage) + if isinstance(container.driver, LocalStorageDriver): + obj = container.upload_object_via_stream(iterator=content, object_name=name) + if metadata is not None: + """ + Libcloud local storage driver doesn't support metadata, so the metadata + is saved in the same container with the combination of the original name + and `.metadata.json` as name + """ + container.upload_object_via_stream( + iterator=get_metadata_file_obj(metadata), + object_name=f"{name}.metadata.json", + ) + return StoredFile(obj) + else: + extra = {} + if metadata is not None: + if "content_type" in metadata: + extra["content_type"] = metadata["content_type"] + extra["meta_data"] = metadata + return StoredFile( + container.upload_object_via_stream( + iterator=content, object_name=name, extra=extra, headers=headers + ) + ) + + @classmethod + def get_file(cls, path: str) -> StoredFile: + """Retrieve the file with `provided` path + path is expected to be `storage_name/file_id` + """ + upload_storage, file_id = path.split("/") + return StoredFile(StorageManager.get(upload_storage).get_object(file_id)) + + @classmethod + def delete_file(cls, path: str) -> bool: + """Delete the file with `provided` path + path is expected to be `storage_name/file_id` + """ + upload_storage, file_id = path.split("/") + obj = StorageManager.get(upload_storage).get_object(file_id) + if isinstance(obj.driver, LocalStorageDriver): + """Try deleting associated metadata file""" + try: + obj.container.get_object(f"{obj.name}.metadata.json").delete() + except ObjectDoesNotExistError: + pass + return obj.delete() + + @classmethod + def _clear(cls) -> None: + """ + This is only for testing pourposes, resets the StorageManager + """ + cls._default_storage_name = None + 
class StoredFile:
    """Thin wrapper around a ``libcloud.storage.base.Object`` exposing the
    stored object's name, original filename and content type, plus helpers
    to read the content and resolve a public URL."""

    def __init__(self, obj: Object) -> None:
        if isinstance(obj.driver, LocalStorageDriver):
            # The local driver cannot persist metadata on the object itself,
            # so StorageManager writes it to a sidecar "<name>.metadata.json"
            # file; load it back here when present.
            try:
                metadata_obj = obj.container.get_object(f"{obj.name}.metadata.json")
                # Use a context manager so the handle is closed (the original
                # json.load(open(...)) leaked the file descriptor).
                with open(metadata_obj.get_cdn_url()) as metadata_file:
                    obj.meta_data = json.load(metadata_file)
            except ObjectDoesNotExistError:
                pass
        self.name = obj.name
        self.filename = obj.meta_data.get("filename", "unnamed")
        self.content_type = obj.extra.get(
            "content_type",
            obj.meta_data.get("content_type", "application/octet-stream"),
        )
        self.object = obj

    def get_cdn_url(self) -> Optional[str]:
        """Return the object's public URL, or None when the driver does not
        implement CDN URLs."""
        try:
            return self.object.get_cdn_url()
        except NotImplementedError:
            return None

    def read(self, n: int = -1) -> bytes:
        """Read up to ``n`` bytes of the stored content (everything by
        default). Remote objects are first downloaded to a temporary file."""
        if isinstance(self.object.driver, LocalStorageDriver):
            # Local objects can be read straight from disk; close the handle
            # when done instead of leaking it.
            with open(self.object.get_cdn_url(), "rb") as local_file:
                return local_file.read(n)
        # Context manager guarantees the temporary file is removed; the
        # original left it behind until interpreter exit.
        with tempfile.NamedTemporaryFile() as _file:
            self.object.download(_file.name, overwrite_existing=True)
            return _file.read(n)
sqlalchemy_file.processors import Processor +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.validators import ImageValidator, Validator + + +class FileField(types.TypeDecorator): # type: ignore + + """ + Provides support for storing attachments to **SQLAlchemy** models. + + [FileField][sqlalchemy_file.types.FileField] can be used as a Column type to + store files into the model. The actual file itself will be uploaded to a specific + `libcloud.storage.base.Container`, and only the [File][sqlalchemy_file.file.File] + information will be stored on the database as JSON. + + [FileField][sqlalchemy_file.types.FileField] is transaction aware, so it will delete + every uploaded file whenever the transaction is rolled back and will + delete any old file whenever the transaction is committed. + + You can save `str`, `bytes` or any python `file` object + + Each file will be validated by provided validators before being saved into + associate storage `libcloud.storage.base.Container` and can go through different + processors before being saved in the database. 
+ + """ + + impl = types.JSON + + def __init__( + self, + *args: Tuple[Any], + upload_storage: Optional[str] = None, + validators: Optional[List[Validator]] = None, + processors: Optional[List[Processor]] = None, + upload_type: Type[File] = File, + multiple: Optional[bool] = False, + **kwargs: Dict[str, Any], + ) -> None: + """ + Parameters: + upload_storage: storage to use + validators: List of validators to apply + processors: List of validators to apply + upload_type: File class to use, could be + use to set custom File class + multiple: Use this to save multiple files + """ + super().__init__(*args, **kwargs) + if processors is None: + processors = [] + if validators is None: + validators = [] + self.upload_storage = upload_storage + self.upload_type = upload_type + self.multiple = multiple + self.validators = validators + self.processors = processors + + def process_bind_param( + self, value: Any, dialect: Dialect + ) -> Union[None, Dict[str, Any], List[Dict[str, Any]]]: + if not value: + return None + if not self.multiple and not isinstance( + value, self.upload_type + ): # pragma: no cover + raise ValueError(f"Expected {self.upload_type}, received: {type(value)}") + if self.multiple and not ( + isinstance(value, list) + and all([isinstance(v, self.upload_type) for v in value]) + ): # pragma: no cover + raise ValueError( + f"Expected MutableList[{self.upload_type}], received: {type(value)}" + ) + return [v.encode() for v in value] if self.multiple else value.encode() + + def process_result_value( + self, value: Any, dialect: Dialect + ) -> Union[None, MutableList[File], File]: + if value is None: + return None + if type(value) is dict: + return ( + MutableList([self.upload_type.decode(value)]) + if self.multiple + else self.upload_type.decode(value) + ) + return MutableList([self.upload_type.decode(v) for v in value]) + + +class ImageField(FileField): + """Inherits all attributes and methods from [FileField][sqlalchemy_file.types.FileField], + but also 
validates that the uploaded object is a valid image. + """ + + def __init__( + self, + *args: Tuple[Any], + upload_storage: Optional[str] = None, + image_validator: Optional[ImageValidator] = None, + validators: Optional[List[Validator]] = None, + processors: Optional[List[Processor]] = None, + upload_type: Type[File] = File, + multiple: Optional[bool] = False, + **kwargs: Dict[str, Any], + ) -> None: + """ + Parameters: + upload_storage: storage to use + image_validator: ImageField use default image + validator, Use this property to customize it. + validators: List of additional validators to apply + processors: List of validators to apply + upload_type: File class to use, could be + use to set custom File class + multiple: Use this to save multiple files + """ + if validators is None: + validators = [] + if image_validator is None: + image_validator = ImageValidator() + assert isinstance(image_validator, ImageValidator) + validators.insert(0, image_validator) + super().__init__( + *args, + upload_storage=upload_storage, + validators=validators, + processors=processors, + upload_type=upload_type, + multiple=multiple, + **kwargs, + ) + + +class FileFieldSessionTracker(object): + mapped_entities: Dict[Type[Any], List[str]] = dict() + + @classmethod + def delete_files(cls, paths: Set[str], ctx: str) -> None: + for path in paths: + StorageManager.delete_file(path) + + @classmethod + def clear_session(cls, session: Session) -> None: + if hasattr(session, "_new_files"): + del session._new_files # type: ignore + if hasattr(session, "_old_files"): + del session._old_files # type: ignore + + @classmethod + def add_new_files_to_session(cls, session: Session, paths: List[str]) -> None: + session._new_files = getattr(session, "_new_files", set()) # type: ignore + session._new_files.update(paths) # type: ignore + + @classmethod + def add_old_files_to_session(cls, session: Session, paths: List[str]) -> None: + session._old_files = getattr(session, "_old_files", set()) # type: 
ignore + session._old_files.update(paths) # type: ignore + + @classmethod + def extract_files_from_history( + cls, data: List[Union[MutableList[File], File]] + ) -> List[str]: + paths = [] + for item in data: + if isinstance(item, list): + paths.extend([f["path"] for f in item]) + elif isinstance(item, File): + paths.append(item["path"]) + return paths + + @classmethod + def _mapper_configured(cls, mapper: Mapper, class_: Any) -> None: + """Detect and listen all class with FileField Column""" + for mapper_property in mapper.iterate_properties: + if isinstance(mapper_property, ColumnProperty) and isinstance( + mapper_property.columns[0].type, FileField + ): + assert ( + len(mapper_property.columns) == 1 + ), "Multiple-column properties are not supported" + if mapper_property.columns[0].type.multiple: + MutableList.associate_with_attribute( + getattr(class_, mapper_property.key) + ) + cls.mapped_entities.setdefault(class_, []).append(mapper_property.key) + + @classmethod + def _after_configured(cls) -> None: + for entity in cls.mapped_entities.keys(): + event.listen(entity, "before_insert", cls._before_insert) + event.listen(entity, "before_update", cls._before_update) + event.listen(entity, "after_update", cls._after_update) + event.listen(entity, "after_delete", cls._after_delete) + + @classmethod + def _after_commit(cls, session: Session) -> None: + """After commit, old files are automatically deleted""" + cls.delete_files(getattr(session, "_old_files", set()), "after_commit") + cls.clear_session(session) + + @classmethod + def _after_soft_rollback(cls, session: Session, _: SessionTransaction) -> None: + """After rollback, new files are automatically deleted""" + cls.delete_files(getattr(session, "_new_files", set()), "after_soft_rollback") + cls.clear_session(session) + + @classmethod + def _after_delete(cls, mapper: Mapper, _: Connection, obj: Any) -> None: + """ + After delete mark all linked files as old in order to delete + them when after session is 
    @classmethod
    def _after_update(cls, mapper: Mapper, _: Connection, obj: Any) -> None:
        """
        After update, mark all edited files as old
        in order to delete them when after session is committed
        """
        tracked_columns: List[str] = cls.mapped_entities.get(mapper.class_, [])
        for key in tracked_columns:
            # history.deleted holds the values that this UPDATE replaced.
            history = get_history(obj, key)
            cls.add_old_files_to_session(
                inspect(obj).session, cls.extract_files_from_history(history.deleted)
            )

    @classmethod
    def _before_update(cls, mapper: Mapper, _: Connection, obj: Any) -> None:
        """
        Before updating values, validate and save files. For multiple fields,
        mark all removed files as old, as _removed attribute will be
        reinitialised after update.
        """
        session = inspect(obj).session
        tracked_columns: List[str] = cls.mapped_entities.get(mapper.class_, [])
        for key in tracked_columns:
            value = getattr(obj, key)
            if value is not None:
                changed, prepare_value = cls.prepare_file_attr(mapper, obj, key)
                if changed:
                    # Write back the prepared value so newly saved files show
                    # up in the attribute history before the flush.
                    setattr(obj, key, prepare_value)
                history = get_history(obj, key)
                cls.add_new_files_to_session(
                    session, cls.extract_files_from_history(history.added)
                )
                if isinstance(value, MutableList):
                    # Items removed from a multiple field are tracked by the
                    # MutableList itself; schedule them for deletion on commit.
                    _removed = getattr(value, "_removed", ())
                    cls.add_old_files_to_session(session, [f["path"] for f in _removed])
    @classmethod
    def prepare_file_attr(
        cls, mapper: Mapper, obj: Any, key: str
    ) -> Tuple[bool, Union[File, List[File]]]:
        """
        Prepare file before saved to database, convert bytes and string,
        saved file into the upload_storage, apply validators and processors

        Returns a ``(changed, value)`` pair: ``changed`` is True when at
        least one new (unsaved) file was found; ``value`` is the prepared
        File, or the list of Files for a multiple field.
        """
        value = getattr(obj, key)

        """Become True when it is new file for single field,
        or when new items is added for multiple field"""
        changed = False

        column_type = mapper.attrs.get(key).columns[0].type
        assert isinstance(column_type, FileField)
        upload_type = column_type.upload_type

        prepared_values: List[File] = []
        for v in value if isinstance(value, list) else [value]:
            # Coerce raw content (str, bytes, file-like) into the File type.
            if not isinstance(v, upload_type):
                v = upload_type(v)
            if not getattr(v, "saved", False):
                # Only unsaved files are validated; already stored files were
                # validated when they were first attached.
                changed = True
                v.apply_validators(column_type.validators, key)
            prepared_values.append(v)

        upload_storage = column_type.upload_storage or StorageManager.get_default()
        for value in prepared_values:
            if not getattr(value, "saved", False):
                value.save_to_storage(upload_storage)
                value.apply_processors(column_type.processors, upload_storage)
        return changed, (
            prepared_values if column_type.multiple else prepared_values[0]
        )
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional, Tuple, Union

from sqlalchemy_file.exceptions import (
    AspectRatioValidationError,
    ContentTypeValidationError,
    DimensionValidationError,
    InvalidImageError,
    SizeValidationError,
)
from sqlalchemy_file.helpers import convert_size

if TYPE_CHECKING:
    from sqlalchemy_file.file import File


class Validator(ABC):
    """
    Interface that must be implemented by file validators.
    File validators get executed before a file is stored on the database
    using one of the supported fields. Can be used to add additional data
    to file object or change it.

    Note: inheriting ``abc.ABC`` makes ``@abstractmethod`` actually
    enforced — without an ABCMeta metaclass the decorator is inert and
    incomplete subclasses could be instantiated silently.
    """

    @abstractmethod
    def process(self, file: "File", attr_key: str) -> None:  # pragma: no cover
        """
        Should be overridden in inherited class

        Parameters:
            file: [File][sqlalchemy_file.file.File] object
            attr_key: current SQLAlchemy column key. Can be passed to
                [ValidationError][sqlalchemy_file.exceptions.ValidationError]
        """
        pass
+ The size of the file can be given in one of + the following formats: + + | **Suffix** | **Unit Name** | **Value** | **Example** | + |------------|---------------|-----------------|-------------| + | (none) | byte | 1 byte | `4096` | + | k | kilobyte | 1,000 bytes | `200k` | + | M | megabyte | 1,000,000 bytes | `2M` | + | Ki | kibibyte | 1,024 bytes | `32Ki` | + | Mi | mebibyte | 1,048,576 bytes | `8Mi` | + + For more information, view + [Wikipedia: Binary prefix](https://en.wikipedia.org/wiki/Binary_prefix) + Example: + ```Python + class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField(validators=[SizeValidator(max_size="2M")])) + ``` + + Raises: + SizeValidationError: When file `size` is greater than max_size + + """ + + def __init__(self, max_size: Union[int, str] = 0) -> None: + super().__init__() + self.max_size = max_size + + def process(self, file: "File", attr_key: str) -> None: + if file.size > convert_size(self.max_size): + raise SizeValidationError( + attr_key, + "The file is too large (%s bytes). Allowed maximum size is %s." + % (file.size, self.max_size), + ) + + +class ContentTypeValidator(Validator): + """Validate file mimetype + Attributes: + allowed_content_types: If set, file `content_type` + must be one of the provided list. 
+ + Example: + ```Python + class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column( + FileField(validators=[ContentTypeValidator(["text/plain", "text/csv"])]) + ) + ``` + + Raises: + ContentTypeValidationError: When file `content_type` not in allowed_content_types + + + """ + + def __init__(self, allowed_content_types: Optional[List[str]] = None) -> None: + super().__init__() + self.allowed_content_types = allowed_content_types + + def process(self, file: "File", attr_key: str) -> None: + if ( + self.allowed_content_types is not None + and file.content_type not in self.allowed_content_types + ): + raise ContentTypeValidationError( + attr_key, + "File content_type %s is not allowed. Allowed content_types are: %s" + % (file.content_type, self.allowed_content_types), + ) + + +class ImageValidator(ContentTypeValidator): + """Default Validator for ImageField + + Attributes: + min_wh: Minimum allowed dimension (w, h). + max_wh: Maximum allowed dimension (w, h). + allowed_content_types: An iterable whose items are + allowed content types. Default is `image/*` + min_aspect_ratio: Minimum allowed image aspect ratio. + max_aspect_ratio: Maximum allowed image aspect ratio. + + Example: + ```Python + + class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column( + ImageField( + image_validator=ImageValidator( + allowed_content_types=["image/x-icon", "image/tiff", "image/jpeg"], + min_wh=(200, 200), + max_wh=(400, 400), + min_aspect_ratio=1, + max_aspect_ratio=16/9, + ) + ) + ) + ``` + + Raises: + ContentTypeValidationError: When file `content_type` not in allowed_content_types + InvalidImageError: When file is not a valid image + DimensionValidationError: When image width and height constraints fail. 
+ + Will add `width` and `height` properties to the file object + """ + + def __init__( + self, + min_wh: Optional[Tuple[int, int]] = None, + max_wh: Optional[Tuple[int, int]] = None, + min_aspect_ratio: Optional[float] = None, + max_aspect_ratio: Optional[float] = None, + allowed_content_types: Optional[List[str]] = None, + ): + from PIL import Image + + Image.init() + super().__init__( + allowed_content_types + if allowed_content_types is not None + else [type for type in Image.MIME.values()] + ) + self.min_width, self.min_height = min_wh if min_wh else (None, None) + self.max_width, self.max_height = max_wh if max_wh else (None, None) + self.min_aspect_ratio = min_aspect_ratio + self.max_aspect_ratio = max_aspect_ratio + self.image = Image + + def process(self, file: "File", attr_key: str) -> None: + super().process(file, attr_key) + import PIL + + try: + image = self.image.open(file.original_content) + except (PIL.UnidentifiedImageError, OSError): + raise InvalidImageError(attr_key, "Provide valid image file") + width, height = image.width, image.height + if self.min_width and width < self.min_width: + raise DimensionValidationError( + attr_key, + f"Minimum allowed width is: {self.min_width}, but {width} is given.", + ) + if self.min_height and height < self.min_height: + raise DimensionValidationError( + attr_key, + f"Minimum allowed height is: {self.min_height}, but {height} is given.", + ) + + if self.max_width and self.max_width < width: + raise DimensionValidationError( + attr_key, + f"Maximum allowed width is: {self.max_width}, but {width} is given.", + ) + + if self.max_height and self.max_height < height: + raise DimensionValidationError( + attr_key, + f"Maximum allowed height is: {self.max_height}, but {height} is given.", + ) + aspect_ratio = width / height + if (self.min_aspect_ratio and self.min_aspect_ratio > aspect_ratio) or ( + self.max_aspect_ratio and self.max_aspect_ratio < aspect_ratio + ): + raise AspectRatioValidationError( + attr_key, + 
f"Invalid aspect ratio {width} / {height} = {aspect_ratio}," + "accepted_range: " + f"{self.min_aspect_ratio} - {self.max_aspect_ratio}", + ) + file.update({"width": width, "height": height}) + file.original_content.seek(0) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_content_type_validator.py b/tests/test_content_type_validator.py new file mode 100644 index 0000000..2d6a685 --- /dev/null +++ b/tests/test_content_type_validator.py @@ -0,0 +1,105 @@ +import tempfile + +import pytest +from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.exceptions import ContentTypeValidationError +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField +from sqlalchemy_file.validators import ContentTypeValidator + +from tests.utils import get_test_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +@pytest.fixture +def fake_txt_file(): + file = tempfile.NamedTemporaryFile(suffix=".txt") + file.write(b"This is a fake text file") + file.flush() + return file + + +@pytest.fixture +def fake_csv_file(): + file = tempfile.NamedTemporaryFile(suffix=".csv") + file.write(b"This is a fake csv file") + file.flush() + return file + + +@pytest.fixture +def fake_pdf_file(): + file = tempfile.NamedTemporaryFile(suffix=".pdf") + file.write(b"This is a fake pdf file") + file.flush() + return file + + +@pytest.fixture +def fake_file(): + file = tempfile.NamedTemporaryFile() + file.write(b"This is a fake with unknown content type") + file.flush() + return file + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column( + FileField(validators=[ContentTypeValidator(["text/plain", "text/csv"])]) + ) + + def __repr__(self): + return "" % ( + self.id, + self.name, + 
class TestContentTypeValidator:
    """End-to-end check that ContentTypeValidator accepts/rejects files
    based on their guessed content type when flushing a model."""

    def setup(self) -> None:
        # Fresh tables and a dedicated container per test run.
        Base.metadata.create_all(engine)
        StorageManager._clear()
        StorageManager.add_storage("test", get_test_container("content-type-validator"))

    def test_content_type_validator(
        self, fake_file, fake_pdf_file, fake_csv_file, fake_txt_file
    ):
        # Unknown content type: rejected at flush time.
        with Session(engine) as session:
            attachment = Attachment(name="unknown file", content=fake_file)
            session.add(attachment)
            with pytest.raises(ContentTypeValidationError):
                session.flush()

        # application/pdf is not in the allow-list: rejected.
        with Session(engine) as session:
            attachment = Attachment(name="pdf file", content=fake_pdf_file)
            session.add(attachment)
            with pytest.raises(ContentTypeValidationError):
                session.flush()

        # text/plain is allowed: flush succeeds and the file is stored.
        with Session(engine) as session:
            attachment = Attachment(name="text file", content=fake_txt_file)
            session.add(attachment)
            session.flush()
            session.refresh(attachment)
            assert attachment.content.file is not None

        # text/csv is allowed as well.
        with Session(engine) as session:
            attachment = Attachment(name="csv file", content=fake_csv_file)
            session.add(attachment)
            session.flush()
            session.refresh(attachment)
            assert attachment.content.file is not None

    def teardown(self):
        # Empty the container before deleting it, then drop the tables.
        for obj in StorageManager.get().list_objects():
            obj.delete()
        StorageManager.get().delete()
        Base.metadata.drop_all(engine)
+def fake_text_file(): + file = tempfile.NamedTemporaryFile(suffix=".txt") + file.write(b"Trying to save text file as image") + file.seek(0) + return file + + +@pytest.fixture +def fake_invalid_image(): + file = tempfile.NamedTemporaryFile(suffix=".png") + file.write(b"Pass through content type validation") + file.seek(0) + return file + + +@pytest.fixture +def fake_valid_image_content(): + return base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAYAAACNMs+9AAAAAXNSR0IArs4c6QAAAHNJREFUKFOdkLEKwCAMRM/JwUFwdPb" + "/v8RPEDcdBQcHJyUt0hQ6hGY6Li8XEhVjXM45aK3xVXNOtNagcs6LRAgB1toX23tHSgkUpEopyxhzGRw" + "+EHljjBv03oM3KJYP1lofkJoHJs3T/4Gi1aJjxO+RPnwDur2EF1gNZukAAAAASUVORK5CYII=" + ) + + +@pytest.fixture +def fake_valid_image(fake_valid_image_content): + file = tempfile.NamedTemporaryFile(suffix=".png") + data = fake_valid_image_content + file.write(data) + file.seek(0) + return file + + +class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField) + + def __repr__(self): + return "" % ( + self.id, + self.title, + self.cover, + ) # pragma: no cover + + +class TestImageField: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-image-field")) + + def test_autovalidate_content_type(self, fake_text_file) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_text_file)) + with pytest.raises(ContentTypeValidationError): + session.flush() + + def test_autovalidate_image(self, fake_invalid_image) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_invalid_image)) + with pytest.raises(InvalidImageError): + session.flush() + + def test_create_image(self, fake_valid_image, fake_valid_image_content) -> None: + with Session(engine) as session: + 
    def teardown(self):
        # Delete every stored object first — presumably the driver refuses
        # to remove a non-empty container (verify per backend) — then drop
        # the container and the test tables.
        for obj in StorageManager.get().list_objects():
            obj.delete()
        StorageManager.get().delete()
        Base.metadata.drop_all(engine)
"/gGQam0qI0UES4KHWZqhvlXB/6Pc0mUUmeWZaiqCv/OOYfjOJBorU/vPfZ9h5QSZVlGfl1XjOOIPM" + "+RpikSY8wphMA8z9Bag3MewWma0DQNGGOw1t5geIaYvu8j2HUd6rqO+gtcliVGPR1DFUrpC3x2bNsWRVFEl23bMAzD3TGsJoR8Yn6Xv1df" + "/fReRqm21woAAAAASUVORK5CYII=" + ) + file.write(data) + file.seek(0) + return file + + +@pytest.fixture +def fake_image_huge_width(): # width=20, height=10 + file = tempfile.NamedTemporaryFile(suffix=".png") + data = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAABQAAAAKCAYAAAC0VX7mAAAAAXNSR0IArs4c6QAAAPJJREFUOE+lkjEKg0AQRf" + "+CFoKNWghWgnZ2Nh7AztJbWHkJ72AhHsFOsPIMIngEFaxsBC2EDa4oScBEkqmW4f+3" + "/JkhVVXRMAyRJAlEUcQvtSwLpmliVtJ1HY3jGI7jwDRNqKoKSZJuccdxxDAMTLv55nnegUVRQNd12Lb9IrgCv4MOXd/3O7AsSyiKAs" + "/z2G9Xhqv+EecE5nkOwzDguu5L1AOwrivrcxz3cSQnME1T+L4Py7L+B7ZtS4MgQJZl4Hn+/8iyLDNgFEWXs3tf" + "+celNE1DNU27td1vYHY2dV3TTbgdtSAIoJSCEMK82/tOPR/2A3aLsV8FPmE1AAAAAElFTkSuQmCC" + ) + file.write(data) + file.seek(0) + return file + + +@pytest.fixture +def fake_image_huge_height(): # width=10, height=17 + file = tempfile.NamedTemporaryFile(suffix=".png") + data = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAoAAAARCAYAAADkIz3lAAAAAXNSR0IArs4c6QAAAQpJREFUKFOVkj2LhEAMhjOF2wgq6xfa2CiK+P9" + "/h4WIooUWKqIuq2CjxRzJsR5y3t1eqiF5JnnfzLA4jrlhGMA5h59iGAZgSZJwTdPANM1Lru97GMcRWFVVfJomiKIIbrfbCd62DZIkAVV" + "VgTVNQzP3fQfHcU5gXdcgCALlCLQsi24iKEkSFZZlAQRxUtd1n6Bt2/B4PABF+75PYJ7noOs63O93aNv2C8RiURQgyzKB8zyD53l0/" + "gau6wplWVLRdV0QRfEaxGyaplQMw/AwdtkRx2Pg2FfHk5mXRkVRCHw+n4fGE4hLR9dBEBCYZRm5xmUf4Nt7/OtlGGP/eOt3fg/qZ/gf" + "UfRvgSY/AOhSyq08LXSPAAAAAElFTkSuQmCC" + ) + file.write(data) + file.seek(0) + return file + + +@pytest.fixture +def fake_image_invalid_ratio(): # width=10, height=15 + file = tempfile.NamedTemporaryFile(suffix=".png") + data = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAPCAYAAADd/14OAAAAAXNSR0IArs4c6QAAAMFJREFUKFOdUrEKhDAUy3N1EMEuou76/x" + "/iVHetToI4dG2PVMop54F32UpDkpf3pO97r5TCN3jvsa4rRGvt0zRF0zS33HEcYa2FGGP8tm2o6xpZll3I" + 
"+75jmibkeQ6Z5zkoGmPQti2SJAlk5xyGYUBVVYciiWVZghZEjHB+L8vyJlJBax0iELTsui44XIj8ZCYqicgl8weRZKoSVIu4VaQlu2PW2MJ" + "/GR9NzR7PU8YeYwu/bebxrnk9RVGE7u4Qr+cFO529ZB6GXB0AAAAASUVORK5CYII= " + ) + file.write(data) + file.seek(0) + return file + + +@pytest.fixture +def fake_valid_image(): # width=14, height=15 + file = tempfile.NamedTemporaryFile(suffix=".png") + data = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAA4AAAAPCAYAAADUFP50AAAAAXNSR0IArs4c6QAAASpJREFUOE+tU0GLglAYHIs0hAg0KjO82cn//y" + "+8eUq6iJZKCiJEGuUyH7Sxm0UsOyd5fsN8M2+e4vt+Z5omRqMRPkHbtijLEsput+vquobrutB1" + "/S33dDohDENMJhMoSZJ0qqoiTVMha5rWS26aRkjL5RJUFeJqtUKe5zgej0L+vfblchHSbDbDfD7Hfr9" + "/EClzOBxQVRU2mw0Gg4Eo3243bLdbTKdTWJYlZ09EHiZJAnqhMkElerdt+9tCL5F/oygC1yO4tuM4P3y/JHIqCAIZ9jzvKaz" + "/VYzjWDwyIILB0ON6vX7t8Z4qgxkOhzJ4vV4loJepZlmGoije3iOruVgsHtfBttAwlcbjcW9zzuezKLMsbNHfu8rXYRiGdLTrOiiKIor87sP9" + "dXwBhJXeghs+f/MAAAAASUVORK5CYII=" + ) + file.write(data) + file.seek(0) + return file + + +class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column( + ImageField( + image_validator=ImageValidator( + min_wh=(10, 10), + max_wh=(15, 15), + min_aspect_ratio=12 / 15, + max_aspect_ratio=1, + ) + ) + ) + + def __repr__(self): + return "" % ( + self.id, + self.title, + self.cover, + ) # pragma: no cover + + +class TestImageValidator: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-image-validator")) + + def test_min_width_validation(self, fake_image_low_width) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_image_low_width)) + with pytest.raises( + DimensionValidationError, + match="Minimum allowed width is: 10, but 5 is given", + ): + session.flush() + + def test_min_height_validation(self, fake_image_low_height) -> None: + with Session(engine) as session: + 
session.add(Book(title="Pointless Meetings", cover=fake_image_low_height)) + with pytest.raises( + DimensionValidationError, + match="Minimum allowed height is: 10, but 7 is given.", + ): + session.flush() + + def test_max_width_validation(self, fake_image_huge_width) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_image_huge_width)) + with pytest.raises( + DimensionValidationError, + match="Maximum allowed width is: 15, but 20 is given", + ): + session.flush() + + def test_max_height_validation(self, fake_image_huge_height) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_image_huge_height)) + with pytest.raises( + DimensionValidationError, + match="Maximum allowed height is: 15, but 17 is given.", + ): + session.flush() + + def test_invalid_aspect_ratio(self, fake_image_invalid_ratio) -> None: + with Session(engine) as session: + session.add( + Book(title="Pointless Meetings", cover=fake_image_invalid_ratio) + ) + with pytest.raises(AspectRatioValidationError): + session.flush() + + def test_valid_image(self, fake_valid_image) -> None: + with Session(engine) as session: + session.add(Book(title="Pointless Meetings", cover=fake_valid_image)) + session.flush() + book = session.execute( + select(Book).where(Book.title == "Pointless Meetings") + ).scalar_one() + assert book.cover.file is not None + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_metadata.py b/tests/test_metadata.py new file mode 100644 index 0000000..8217af0 --- /dev/null +++ b/tests/test_metadata.py @@ -0,0 +1,48 @@ +import pytest +from libcloud.storage.drivers.local import LocalStorageDriver +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy_file.helpers import get_content_from_file_obj +from sqlalchemy_file.storage import StorageManager + 
+from tests.utils import get_test_container + + +class TestMetadata: + def setup(self) -> None: + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-metadata")) + + def test_add_metadata(self): + name = "test_metadata.txt" + stored_file = StorageManager.save_file( + name, + get_content_from_file_obj(b"Test metadata"), + metadata={"content_type": "text/plain", "filename": "test_metadata.txt"}, + ) + if isinstance(stored_file.object.driver, LocalStorageDriver): + assert ( + stored_file.object.container.get_object(f"{name}.metadata.json") + is not None + ) + else: + with pytest.raises(ObjectDoesNotExistError): + stored_file.object.container.get_object(f"{name}.metadata.json") + assert stored_file.filename == "test_metadata.txt" + assert stored_file.content_type == "text/plain" + StorageManager.delete_file("test/test_metadata.txt") + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object("test_metadata.txt.metadata.json") + + def test_no_metadata(self): + name = "test_metadata.txt" + stored_file = StorageManager.save_file( + name, get_content_from_file_obj(b"Test metadata") + ) + with pytest.raises(ObjectDoesNotExistError): + stored_file.object.container.get_object(f"{name}.metadata.json") + assert stored_file.filename == "unnamed" + assert stored_file.content_type == "application/octet-stream" + StorageManager.delete_file("test/test_metadata.txt") + + def teardown(self): + StorageManager.get().delete() diff --git a/tests/test_multiple_field.py b/tests/test_multiple_field.py new file mode 100644 index 0000000..156765e --- /dev/null +++ b/tests/test_multiple_field.py @@ -0,0 +1,362 @@ +import tempfile + +import pytest +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy import Column, Integer, String, select +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +from tests.utils import 
get_test_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +@pytest.fixture +def fake_content(): + return "This is a fake file" + + +@pytest.fixture +def fake_file(fake_content): + file = tempfile.NamedTemporaryFile() + file.write(fake_content.encode()) + file.seek(0) + return file + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + multiple_content = Column(FileField(multiple=True)) + + def __repr__(self): + return "" % ( + self.id, + self.name, + self.multiple_content, + ) # pragma: no cover + + +class TestMultipleField: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-multiple-field")) + + def test_create_multiple_content(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Create multiple", + multiple_content=[ + "from str", + b"from bytes", + fake_file, + ], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create multiple") + ).scalar_one() + assert attachment.multiple_content[0].file.read().decode() == "from str" + assert attachment.multiple_content[1].file.read() == b"from bytes" + assert attachment.multiple_content[2].file.read() == fake_content.encode() + + def test_create_multiple_content_rollback(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Create multiple content rollback", + multiple_content=[ + "from str", + b"from bytes", + fake_file, + ], + ) + ) + session.flush() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Create multiple content rollback" + ) + ).scalar_one() + paths = [p["path"] for p in attachment.multiple_content] + assert all([StorageManager.get_file(path) is not None for path 
in paths]) + session.rollback() + for path in paths: + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(path) + + def test_edit_existing_multiple_content(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content edit all", + multiple_content=[b"Content 1", b"Content 2"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Multiple content edit all") + ).scalar_one() + old_paths = [f["path"] for f in attachment.multiple_content] + attachment.multiple_content = [b"Content 1 edit", b"Content 2 edit"] + session.add(attachment) + session.commit() + session.refresh(attachment) + assert attachment.multiple_content[0].file.read() == b"Content 1 edit" + assert attachment.multiple_content[1].file.read() == b"Content 2 edit" + for path in old_paths: + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(path) + + def test_edit_existing_multiple_content_rollback(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content edit all rollback", + multiple_content=[b"Content 1", b"Content 2"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content edit all rollback" + ) + ).scalar_one() + old_paths = [f["path"] for f in attachment.multiple_content] + attachment.multiple_content = [b"Content 1 edit", b"Content 2 edit"] + session.add(attachment) + session.flush() + session.refresh(attachment) + assert attachment.multiple_content[0].file.read() == b"Content 1 edit" + assert attachment.multiple_content[1].file.read() == b"Content 2 edit" + new_paths = [f["path"] for f in attachment.multiple_content] + session.rollback() + for path in new_paths: + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(path) + for path in old_paths: + assert StorageManager.get_file(path) is not None + + def 
test_edit_existing_multiple_content_add_element(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content add element", + multiple_content=[b"Content 1", b"Content 2"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content add element" + ) + ).scalar_one() + assert len(attachment.multiple_content) == 2 + attachment.multiple_content.append(b"Content 3") + attachment.multiple_content += [b"Content 4"] + attachment.multiple_content.extend([b"Content 5"]) + session.add(attachment) + session.commit() + session.refresh(attachment) + assert len(attachment.multiple_content) == 5 + assert attachment.multiple_content[0].file.read() == b"Content 1" + assert attachment.multiple_content[1].file.read() == b"Content 2" + assert attachment.multiple_content[2].file.read() == b"Content 3" + assert attachment.multiple_content[3].file.read() == b"Content 4" + assert attachment.multiple_content[4].file.read() == b"Content 5" + + def test_edit_existing_multiple_content_add_element_rollback(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content add element rollback", + multiple_content=[b"Content 1", b"Content 2"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content add element rollback" + ) + ).scalar_one() + attachment.multiple_content += [b"Content 3", b"Content 4"] + session.add(attachment) + session.flush() + session.refresh(attachment) + assert len(attachment.multiple_content) == 4 + path3 = attachment.multiple_content[2].path + path4 = attachment.multiple_content[3].path + assert StorageManager.get_file(path3) is not None + assert StorageManager.get_file(path4) is not None + session.rollback() + assert len(attachment.multiple_content) == 2 + for path in (path3, path4): + with pytest.raises(ObjectDoesNotExistError): + 
StorageManager.get_file(path) + + def test_edit_existing_multiple_content_remove_element(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content remove element", + multiple_content=[ + b"Content 1", + b"Content 2", + b"Content 3", + b"Content 4", + b"Content 5", + ], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content remove element" + ) + ).scalar_one() + first_removed = attachment.multiple_content.pop(1) + second_removed = attachment.multiple_content[3] + attachment.multiple_content.remove(second_removed) + third_removed = attachment.multiple_content[2] + del attachment.multiple_content[2] + session.add(attachment) + session.commit() + session.refresh(attachment) + assert len(attachment.multiple_content) == 2 + assert attachment.multiple_content[0].file.read() == b"Content 1" + assert attachment.multiple_content[1].file.read() == b"Content 3" + for file in (first_removed, second_removed, third_removed): + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(file.path) + + def test_edit_existing_multiple_content_remove_element_rollback(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content remove element rollback", + multiple_content=[b"Content 1", b"Content 2", b"Content 3"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content remove element rollback" + ) + ).scalar_one() + attachment.multiple_content.pop(0) + session.add(attachment) + session.flush() + session.refresh(attachment) + assert len(attachment.multiple_content) == 2 + assert attachment.multiple_content[0].file.read() == b"Content 2" + assert attachment.multiple_content[1].file.read() == b"Content 3" + session.rollback() + assert len(attachment.multiple_content) == 3 + assert attachment.multiple_content[0].file.read() == b"Content 1" + assert 
attachment.multiple_content[1].file.read() == b"Content 2" + assert attachment.multiple_content[2].file.read() == b"Content 3" + + def test_edit_existing_multiple_content_replace_element(self, fake_file) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content replace", + multiple_content=[b"Content 1", b"Content 2", b"Content 3"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Multiple content replace") + ).scalar_one() + before_replaced_path = attachment.multiple_content[1].path + attachment.multiple_content[1] = b"Content 2 replaced" + session.add(attachment) + session.commit() + session.refresh(attachment) + assert attachment.multiple_content[1].file.read() == b"Content 2 replaced" + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(before_replaced_path) + + def test_edit_existing_multiple_content_replace_element_rollback( + self, fake_file + ) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple content replace rollback", + multiple_content=[b"Content 1", b"Content 2", b"Content 3"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Multiple content replace rollback" + ) + ).scalar_one() + attachment.multiple_content[1] = b"Content 2 replaced" + session.add(attachment) + session.flush() + session.refresh(attachment) + assert attachment.multiple_content[1].file.read() == b"Content 2 replaced" + new_path = attachment.multiple_content[1].path + session.rollback() + assert attachment.multiple_content[1].file.read() == b"Content 2" + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(new_path) + + def test_delete_existing_multiple_content(self, fake_file) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Deleting multiple content", + multiple_content=[b"Content 1", b"Content 2", b"Content 3"], + ) + ) 
+ session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Deleting multiple content") + ).scalar_one() + file_ids = [f.file_id for f in attachment.multiple_content] + for file_id in file_ids: + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.commit() + for file_id in file_ids: + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(file_id) + + def test_delete_existing_multiple_content_rollback(self, fake_file) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Deleting multiple content rollback", + multiple_content=[b"Content 1", b"Content 2", b"Content 3"], + ) + ) + session.commit() + attachment = session.execute( + select(Attachment).where( + Attachment.name == "Deleting multiple content rollback" + ) + ).scalar_one() + file_ids = [f.file_id for f in attachment.multiple_content] + for file_id in file_ids: + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.flush() + session.rollback() + for file_id in file_ids: + assert StorageManager.get().get_object(file_id) is not None + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_multiple_storage.py b/tests/test_multiple_storage.py new file mode 100644 index 0000000..0211d39 --- /dev/null +++ b/tests/test_multiple_storage.py @@ -0,0 +1,56 @@ +import pytest +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy import Column, Integer, String, select +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +from tests.utils import get_test_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +class Attachment(Base): + __tablename__ = "attachment" + + id = 
Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content_first = Column(FileField(upload_storage="first")) + content_second = Column(FileField(upload_storage="second")) + + +class TestMultipleStorage: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("first", get_test_container("first")) + StorageManager.add_storage("second", get_test_container("second")) + + def test_multiple_storage(self) -> None: + with Session(engine) as session: + session.add( + Attachment( + name="Multiple Storage", + content_first=b"first", + content_second=b"second", + ) + ) + session.flush() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Multiple Storage") + ).scalar_one() + first_fileid = attachment.content_first.file_id + second_fileid = attachment.content_second.file_id + assert StorageManager.get("first").get_object(first_fileid) is not None + assert StorageManager.get("second").get_object(second_fileid) is not None + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get("first").get_object(second_fileid) + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get("second").get_object(first_fileid) + session.rollback() + + def teardown(self): + StorageManager.get("first").delete() + StorageManager.get("second").delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_mutable_list.py b/tests/test_mutable_list.py new file mode 100644 index 0000000..b49cad7 --- /dev/null +++ b/tests/test_mutable_list.py @@ -0,0 +1,75 @@ +from sqlalchemy_file.mutable_list import MutableList + + +def test_init_value() -> None: + arr: MutableList[int] = MutableList() + assert arr == [] + arr = MutableList([1, 2, 3]) + assert arr == [1, 2, 3] + + +def test_keep_traced_when_pop() -> None: + arr: MutableList[int] = MutableList([1, 2, 3]) + removed_item = arr.pop(1) + assert removed_item == 2 + assert len(arr) == 2 + assert arr._removed == 
[2] + arr.extend([4, 5, 6]) + assert arr == [1, 3, 4, 5, 6] + arr.pop(3) + assert arr._removed == [2, 5] + + +def test_keep_traced_when_replace() -> None: + arr: MutableList[int] = MutableList([1, 2, 3, 4, 5]) + arr[1] = 9 + arr[2] = 7 + assert arr == [1, 9, 7, 4, 5] + assert arr._removed == [2, 3] + + +def test_keep_traced_when_slice_replace() -> None: + arr: MutableList[int] = MutableList([1, 2, 3, 4, 5, 6, 7]) + arr[2:4] = [1] + assert arr == [1, 2, 1, 5, 6, 7] + assert arr._removed == [3, 4] + + +def test_keep_traced_when_remove() -> None: + arr: MutableList[int] = MutableList([1, 2, 3, 5, 6, 7]) + arr.remove(2) + arr.remove(6) + assert arr == [1, 3, 5, 7] + assert arr._removed == [2, 6] + + +def test_keep_traced_when_del() -> None: + arr: MutableList[int] = MutableList([1, 2, 3, 5, 6, 7]) + del arr[2] + del arr[4] + assert arr == [1, 2, 5, 6] + assert arr._removed == [3, 7] + + +def test_keep_traced_when_del_slice() -> None: + arr: MutableList[int] = MutableList([1, 2, 3, 5, 6, 7]) + del arr[2:4] + assert arr == [1, 2, 6, 7] + assert arr._removed == [3, 5] + + +def test_keep_traced_when_clear() -> None: + arr: MutableList[int] = MutableList([1, 2, 3]) + arr.clear() + assert arr == [] + assert arr._removed == [1, 2, 3] + + +def test_other() -> None: + arr: MutableList[int] = MutableList([1, 2]) + arr.insert(0, 5) + assert arr == [5, 1, 2] + arr.sort() + assert arr == [1, 2, 5] + arr.reverse() + assert arr == [5, 2, 1] diff --git a/tests/test_processor.py b/tests/test_processor.py new file mode 100644 index 0000000..2d0d344 --- /dev/null +++ b/tests/test_processor.py @@ -0,0 +1,76 @@ +import base64 +import tempfile + +import pytest +from sqlalchemy import Column, Integer, String, select +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.processors import ThumbnailGenerator +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import ImageField + +from tests.utils import get_test_container, get_test_engine + 
+engine = get_test_engine() +Base = declarative_base() + + +@pytest.fixture +def fake_image(): + file = tempfile.NamedTemporaryFile(suffix=".png") + data = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAYAAACtWK6eAAAAAXNSR0IArs4c6QAAD1BJREFUeF7tnVuIlVUUx9dk5r3JazWi" + "+TAUWYQoqRVk3npQw4yS7EEUKSkzejBRzBIJgiiSElEwUgnFCxqEPSjaRbQHR0spH2wa1Bgjq9G0MbXLxNp2xuPMOWe" + "+y76stfb6IJSZ71t7rf/6/87a+zuEVXV1dS19+vSBLl26gF6qgCpwVYHLly9DU1MTVDU0NLTgX2pra6G6ulr1UQWiV+D333+H" + "+vp6wMFR1djY2NKjRw/zA4Ukem9EL0ABDmShubn5KiA1NTVQ/AudJNH7JEoB2jJw" + "+vTpa4CgIgpJlL7Qost4vx0gCol6JUYFyg2GkoAoJDFaJN6aK+2aygKikMRrmJgq7" + "+hIUREQhSQmq8RXa0dwoCIdAqKQxGecGCpOAkdiQBSSGCwTT41J4UgFiEISj4EkV5oGjtSAKCSSrSO" + "/trRwZAJEIZFvJIkVZoEjMyAKiUQLya0pKxy5AFFI5BpKUmV54MgNiEIiyUryaskLhxVAFBJ5xpJQkQ04rAGikEiwlJwabMFhFRCFRI7BOFdiEw7rgCgknK3FP3fbcDgBRCHhbzSOFbiAwxkgCglHi/HN2RUcTgFRSPgajlPmLuFwDohCwslq/HJ1DYcXQBQSfsbjkLEPOLwBopBwsByfHH3B4RUQhYSPASln6hMO74AoJJStRz8333AEAUQhoW9EihmGgCMYIAoJRQvSzSkUHEEBUUjoGpJSZiHhCA6IQkLJivRyCQ0HCUAUEnrGpJARBTjIAKKQULAknRyowEEKEIWEjkFDZkIJDnKAKCQhrRl+bWpwkAREIQlv1BAZUISDLCAKSQiLhluTKhykAVFIwhnW58qU4SAPiELi06r+16IOBwtAFBL/xvWxIgc42ACikPiwrL81uMDBChCFxJ+BXa7ECQ52gCgkLq3rPjY3OFgCopC4N7KLFTjCwRYQhcSFhd3F5AoHa0AUEneGthmZMxzsAVFIbFrZfizucIgARCGxb2wbESXAIQYQhcSGpe3FkAKHKEAUEnsGzxNJEhziAFFI8lg7/7PS4BAJiEKS3+hZIkiEQywgCkkWi2d/RiocogFRSLIbPs2TkuEQD4hCksbq6e+VDkcUgCgk6Y2f5IkY4IgGEIUkieWT3xMLHFEBopAkB6DSnTHBER0gCkk+SGKDI0pAFJJskMQIR7SAKCTpIIkVjqgBUUiSQRIzHNEDopBUhiR2OBSQ//2hRmgPimpyVZPTp09DVWNjY0tNTU2ymSv0LjXEtcaqFte0UECKgFdjAKgG108ABaTNRIzZIDHXXm5jpICUUCZGo8RYc5LTggJSRqWYDBNTrUmgKL5HAamgWAzGiaHGtFAoICkUk2wgybWlaHHFW3WCJFBSopEk1pSglalvUUASSibJUJJqSdi+zLcpICmkk2AsCTWkaFnuWxWQlBJyNhjn3FO2ydrtCkgGKTkajWPOGVpj/REFJKOknAzHKdeM7XD2mAKSQ1oOxuOQY44WOH9UAckpMWUDUs4tp+zeHldALEhN0YgUc7IgtfcQCoglySkZklIuluQNFkYBsSg9BWNSyMGipMFDKSCWWxDSoCHXtiwjmXAKiINWhDBqiDUdSEcupALiqCU+DetzLUdykQ2rgDhsjQ/j+ljDoUTkQysgjlvk0sAuYzuWhU14BcRDq1wY2UVMD1KwW0IB8dQym4a2GctT+WyXUUA8ts6GsW3E8Fgy+6UUEM8tzGPwPM96LlPMcgpIg
FZmMXqWZwKUJm5JBSRQS9MYPs29gcoRu6wCErC1SYyf5J6AJYhfWgEJ3OJKACgcgZuj//xB+AZgBqVAUDho9EYnCI0+XAcJplRfXw+1tbVQXV1NJMM401BACPW9MDUwJYWDRmMUEBp9MFkoIISa8X8qCgiRnhSfOXSLRaQpekin0Qg9pNPoQ6ksdIIE7o2+5g3cgA6W/+mnn/RfuQ3VoiSvcpPcEyr/GNbVCRKoy2mMn+beQOWIXVYnSIDWZjF8lmcClCZuSQXEc0vzGD3Ps57LFLOcbrE8ttKGwW3E8Fgy+6UUEE8ttGlsm7E8lc92Gd1ieWidC0O7iOlBCnZLKCCOW+bSyC5jO5aFTXjdYjlslQ8D+1jDoUTkQysgjlrk07g+13IkF9mwusVy0JoQhg2xpgPpyIXUCWK5JSGNGnJtyzKSCacTxGIrKBiUQg4WJQ0eSgGx1AJKxqSUiyV5g4VRQCxIT9GQFHOyILX3EHoGySk5ZSNSzi2n7N4eV0BySM3BgBxyzNEC54/qFiujxJyMxynXjO1w9phOkAzScjQcx5wztMb6IzpBUkrK2Wicc0/ZJmu3KyAppJRgMAk1pGhZ7lt1i5VQQknGklRLwvZlvk0BSSCdRENJrClBK1PfooB0IJlkI0muLTUJZR7QM0gFJWMwUAw15oFFJ0gZ9WIyTky1poVFASmhWIyGibHmJLDoFquNSjEbJebay8GiE6RIGTVI6X8OLsknrdR7FJD/O6twXLO4anFNC91ilflHNKV+IiatSyG5qlT0gKgRyiOj2gBEvcVSA3Q8T2LXKFpAYm98x2jomSTaLZbCkQaPq/fGqll0EyTWRqdHov0TMWoX1SE9xgbbAKM4RmwaRgNIbI21DUaskESxxVI47OMSi6biAYmlkfYR6DhiDNqKBiSGBnZsY7d3SNdY7BlEeuPc2j5ddMlai5wgkhuWzrr+7paquThApDbKn9WzryRRe1FbLIkNym7XME9K64GYCSKtMWHsbWdVSb0QAYikhtixaPgoUnrCHhApjQhvafsZSOgN6zOIhAbYtyWtiNx7xHaCcBeelo3dZsO5VywB4Sy4WyvSjc61Z+y2WFyFpmtdf5lx7B2rCcJRYH/247EStx6ymSDchOVh1zBZcuolC0A4CRrGcvxW5dJT8oBwEZKfRcNnzKG3pM8gHAQMbzPeGVDvMVlAqAvH25a0sqfca5JbLMqC0bKWnGyo9pzcBKEqlBwr0q2EYu9JTRCKAtG1k8zMqHmADCDUhJFpPx5VUfICiS0WJUF4WEh+llQ8EXyCUBFCvuX4VUjBG0EnCAUB+NkmroxDeyTYBAldeFw2411tSK8EmSAhC+ZtlXizD+UZ7xMkVKHxWktO5SG84xWQEAXKsYdWggr49pC3LZbvwtROchXw6SUvE8RnQXJtoZUVK+DLU84niK9C1D7xKeDDW04niI8C4rOFVuxzkjibIAqHGtmXAi695gQQlwn7El3X4aWAK89Z32K5SpRXuzTbEAq48J7VCeIiwRBC65p8FbDtQWsTxHZifFukmYdWwKYXrQBiM6HQ4ur6MhSw5cncgNhKREZbtApKCtjwZi5AbCRASVDNRZ4CeT2a+ZCed2F5rdCKqCqQx6uZJkieBamKqHnJViCrZ1NPkKwLyZZfq+OgQBbvppogWRbgIJzm6E6BixcvQvfu3Usu8Oeff8KNN94InTt3bvf7v/76C/C/cs+Wy/jy5csmZqdOnUrG/O2336CxsRFqa2uhurq69Z5///0XmpuboVevXtc9lxgQhcOdiSRG/vTTT+Gll14CBOTSpUvw4osvwvLly02puG155pln4OjRo/DPP//AjBkzYNWqVXDDDTfA33//Dc8//zxs2bLFGP2+++6DzZs3w4ABAyrK9P3338Ps2bPh+PHjJsYjjzwCGzZsgJ49e7aLOXToUHjttddg5MiRBhJce+nSpQZUvP+jjz6C0aNHt+Za1djY2FJTU1M2AYVDooXd1
fTrr7/C4MGDYdu2bTBp0iQ4ceIEDBs2DD7++GNj3McffxwGDRoE7733Hvzxxx/w8MMPw7PPPgsvvPACrFixAjZt2gR79uyBHj16wHPPPWf+L0IEptJ1//33w/jx4+HNN98EnEwTJkyARx99FJYtW1YyJub46quvGoAxnwMHDsBdd91l4Fi0aBE0NDTATTfdBB1OEIXDnZGkRm5qaoIvv/zSGK9woYHnzZsHTz31FNxyyy1w6tQpuP32282vP/jgA/Np/8UXX8CDDz4I8+fPN1MFrx9++AHuueceOHfuHLz88sswcOBA82mPFxocf75y5UrYsWMHjB071sTG65VXXoFffvkF1q1bVzbmyZMnYfHixdDS0gIffvhha64I9/r16028iod0hUOqhf3WhSYfMWIEHDlyxEyDhx56CC5cuNCaxL59++DJJ5+En3/+Gfr06QO7d+829+OF5sVPctyO4XYIf75r1y64cuUKTJkyBb755hu49dZbrysIzyHDhw+H119/HaZPn14x5sKFC815BCdN4UyCkwjzwa1e2QmicPg1kdTVcKsyefJkWLBgAcyZMwe++uormDp1Kpw5c6a15EOHDpltFh6Su3TpAl9//TXgOaFw4cF57969gFMIt21vv/22AQQBwFjFF26Z8Hxz8803m6mEV6WYOIVwK4Zbv8LB/bHHHjP54BQqOUEUDql29VvX/v374emnn4a33nqrdcuEkwCNjp/yhQvNj1uqwgTZuXMnPPDAA+bXeIjHw/N3330Hd999t/nZvffeaw7wOD2KL/y0x6mC5n7nnXfMoR8vnErlYi5ZssTkg+ef+vp6AwkCgvngBGkHiMLh10RSV8NDL25v8HCN54rChVMCzwn41mnIkCHmx/gWaevWrfDZZ58ZMObOnQuzZs0yvzt27JjZLqEvcRLgmWLt2rXmzRi+JZs5c6a5DyfSmDFjzNsyPOsUX5Vi4tYKwcK4uAbmhRMP88Z4122xFA6pdvVbF0KAWyQ08sSJE9stjtsiPAi///775i0WnknQ1PjG6t133zVvsfDA3q1bN7Mtw7dSGzduBDxUjxo1yrwAwAk0btw4qKurgzvuuAOeeOIJs+Ybb7zRbr1KMQ8fPmzefh08eNBMjzVr1pjzyLfffgt9+/a9NkHwlVphxBR/geJXWl1NggI4DXB64Cd+8YVbFjQrflE3bdo0+PHHH4358d7Vq1e3fg+C32d88skn5vk777wTtm/fbsyKb5Vw+4PnGbzQyJ9//rn5nuS2224zh/mqqqrWJfHtF55v8HuRUjH79+9v7sVXy4VDOj6PYPfu3dsAg2eaqoaGhhZ8Ndf220UJzdIa6CqA2yKcEm2/vcaMz58/bw7i/fr1s1ZApZi4ZTt79qx5I4Znl8JuCs8vVXV1dS34l65du5rXakgR/olX4e/Ff+LPi+8rprbt79LEKMQsF6OgVLkcC78vlX/x7yrFb1t3cf5JYySNX+o+a27QQLkVwG0cDo7/AJQO03bJUvvlAAAAAElFTkSuQmCC" + ) + file.write(data) + file.seek(0) + return file + + +class Book(Base): + __tablename__ = "book" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + cover = Column(ImageField(processors=[ThumbnailGenerator()])) + + def __repr__(self): + return "" % ( + self.id, + self.title, + self.cover, + ) # pragma: no cover + + +class TestThumbnailGenerator: + def setup(self) -> None: + Base.metadata.create_all(engine) + 
StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-processor")) + + def test_create_image_with_thumbnail(self, fake_image) -> None: + with Session(engine) as session: + from PIL import Image + + session.add(Book(title="Pointless Meetings", cover=fake_image)) + session.flush() + book = session.execute( + select(Book).where(Book.title == "Pointless Meetings") + ).scalar_one() + assert book.cover["thumbnail"] is not None + thumbnail = StorageManager.get_file(book.cover["thumbnail"]["path"]) + assert thumbnail is not None + thumbnail = Image.open(thumbnail) + assert max(thumbnail.width, thumbnail.height) == 128 + assert book.cover["thumbnail"]["width"] == thumbnail.width + assert book.cover["thumbnail"]["height"] == thumbnail.height + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_result_value.py b/tests/test_result_value.py new file mode 100644 index 0000000..baf7278 --- /dev/null +++ b/tests/test_result_value.py @@ -0,0 +1,123 @@ +import pytest +from libcloud.storage.drivers.dummy import DummyFileObject as BaseDummyFileObject +from sqlalchemy import Column, Integer, String, select +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.file import File +from sqlalchemy_file.processors import Processor +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +from tests.utils import get_dummy_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +class DummyFile(BaseDummyFileObject): + """Add size just for test purpose""" + + def __init__(self, yield_count=5, chunk_len=10): + super().__init__(yield_count, chunk_len) + self.size = len(self) + self.filename = "dummy-file" + self.content_type = "application/octet-stream" + + +class DictLikeCheckerProcessor(Processor): + def process(self, file: "File", 
upload_storage=None): + file["dummy_attr"] = "Dummy data" + file["del_attr"] = True + del file["del_attr"] + with pytest.raises(AttributeError): + delattr(file, "del_attr") + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField(processors=[DictLikeCheckerProcessor()])) + multiple_content = Column( + FileField(multiple=True, processors=[DictLikeCheckerProcessor()]) + ) + + def __repr__(self): + return "" % ( + self.id, + self.name, + self.content, + self.multiple_content, + ) # pragma: no cover + + +class TestResultValue: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_dummy_container("test-result-value")) + + def test_single_column_is_dictlike(self) -> None: + with Session(engine) as session: + attachment = Attachment(name="Single content", content=DummyFile()) + session.add(attachment) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Single content") + ).scalar_one() + assert attachment.content.dummy_attr == "Dummy data" + assert "del_attr" not in attachment.content + + def test_multiple_column_is_list_of_dictlike(self) -> None: + with Session(engine) as session: + attachment = Attachment( + name="Multiple content", + multiple_content=[DummyFile(5, 10), DummyFile(10, 20)], + ) + session.add(attachment) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Multiple content") + ).scalar_one() + assert isinstance(attachment.multiple_content, list) + for content in attachment.multiple_content: + assert content.dummy_attr == "Dummy data" + assert "del_attr" not in content + + def test_column_cannot_edit_after_save(self) -> None: + with Session(engine) as session: + attachment = Attachment(name="Single content", content=DummyFile()) + session.add(attachment) + 
session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Single content") + ).scalar_one() + with pytest.raises(TypeError): + attachment.content["another_dummy_attr"] = "Another Dummy data" + with pytest.raises(TypeError): + del attachment.content["dummy_attr"] + with pytest.raises(TypeError): + delattr(attachment.content, "dummy_attr") + + def test_multiple_column_cannot_edit_after_save(self) -> None: + with Session(engine) as session: + attachment = Attachment( + name="Multiple content Freeze", + multiple_content=[DummyFile(5, 10), DummyFile(10, 20)], + ) + session.add(attachment) + session.commit() + m_attachment = session.execute( + select(Attachment).where(Attachment.name == "Multiple content Freeze") + ).scalar_one() + # Cannot edit individual list element + for content in m_attachment.multiple_content: + with pytest.raises(TypeError): + content["another_dummy_attr"] = "Another Dummy data" + with pytest.raises(TypeError): + del content["dummy_attr"] + with pytest.raises(TypeError): + delattr(content, "dummy_attr") + + def teardown(self): + Base.metadata.drop_all(engine) diff --git a/tests/test_single_field.py b/tests/test_single_field.py new file mode 100644 index 0000000..033a32a --- /dev/null +++ b/tests/test_single_field.py @@ -0,0 +1,308 @@ +import tempfile + +import pytest +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy import Column, ForeignKey, Integer, String, select +from sqlalchemy.orm import Session, declarative_base, relationship +from sqlalchemy_file.file import File +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField + +from tests.utils import get_test_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +@pytest.fixture +def fake_content(): + return "This is a fake file" + + +@pytest.fixture +def fake_file(fake_content): + file = tempfile.NamedTemporaryFile(suffix=".txt") + 
file.write(fake_content.encode()) + file.seek(0) + return file + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField) + + article_id = Column(Integer, ForeignKey("article.id")) + + def __repr__(self): + return "<Attachment: id %s; name: %s; content %s; article_id %s>" % ( + self.id, + self.name, + self.content, + self.article_id, + ) # pragma: no cover + + +class Article(Base): + __tablename__ = "article" + + id = Column(Integer, autoincrement=True, primary_key=True) + title = Column(String(100), unique=True) + + attachments = relationship(Attachment, cascade="all, delete-orphan") + + def __repr__(self): + return "<Article: id %s; title: %s; attachments(%s): %s>
" % ( + self.id, + self.title, + len(self.attachments), + self.attachments, + ) # pragma: no cover + + +class TestSingleField: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-simple-field")) + + def test_create_from_string(self, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Create fake string", content=fake_content)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create fake string") + ).scalar_one() + assert attachment.content.saved + assert attachment.content.file.read() == fake_content.encode() + + def test_create_from_bytes(self, fake_content) -> None: + with Session(engine) as session: + session.add( + Attachment(name="Create Fake bytes", content=fake_content.encode()) + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create Fake bytes") + ).scalar_one() + assert attachment.content.saved + assert attachment.content.file.read() == fake_content.encode() + + def test_create_fromfile(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Create Fake file", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create Fake file") + ).scalar_one() + assert attachment.content.saved + assert attachment.content.file.read() == fake_content.encode() + + def test_file_is_created_when_flush(self, fake_file, fake_content) -> None: + with Session(engine) as session: + attachment = Attachment(name="Create Fake file 2", content=File(fake_file)) + session.add(attachment) + with pytest.raises(RuntimeError): + assert attachment.content.file is not None + session.flush() + assert attachment.content.file is not None + + def test_create_rollback(self, fake_file, fake_content) -> None: + with Session(engine) as session: + 
session.add(Attachment(name="Create rollback", content=fake_file)) + session.flush() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create rollback") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.rollback() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(file_id) + + def test_edit_existing(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Editing test", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Editing test") + ).scalar_one() + old_file_id = attachment.content.file_id + attachment.content = b"New content" + session.add(attachment) + session.commit() + session.refresh(attachment) + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(old_file_id) + assert attachment.content.file.read() == b"New content" + + def test_edit_existing_none(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Testing None edit", content=None)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Testing None edit") + ).scalar_one() + attachment.content = fake_file + session.add(attachment) + session.commit() + session.refresh(attachment) + assert attachment.content.file.read() == fake_content.encode() + + def test_edit_existing_rollback(self, fake_file) -> None: + with Session(engine) as session: + session.add( + Attachment(name="Editing test rollback", content=b"Initial content") + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Editing test rollback") + ).scalar_one() + old_file_id = attachment.content.file_id + attachment.content = b"New content" + session.add(attachment) + session.flush() + session.refresh(attachment) + new_file_id = 
attachment.content.file_id + session.rollback() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(new_file_id) + assert StorageManager.get().get_object(old_file_id) is not None + assert attachment.content.file.read() == b"Initial content" + + def test_edit_existing_multiple_flush(self, fake_file) -> None: + with Session(engine) as session: + attachment = Attachment( + name="Multiple flush edit", content=b"first content" + ) + session.add(attachment) + session.flush() + session.refresh(attachment) + before_first_edit_fileid = attachment.content.file_id + attachment.content = b"first edit" + session.add(attachment) + session.flush() + session.refresh(attachment) + first_edit_fileid = attachment.content.file_id + attachment.content = b"second edit" + session.add(attachment) + session.flush() + second_edit_fileid = attachment.content.file_id + session.commit() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(before_first_edit_fileid) + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(first_edit_fileid) + assert StorageManager.get().get_object(second_edit_fileid) is not None + assert attachment.content.file.read() == b"second edit" + + def test_delete_existing(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Deleting test", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Deleting test") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.commit() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(file_id) + + def test_delete_existing_rollback(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Deleting rollback test", content=fake_file)) + session.commit() + attachment = session.execute( + 
select(Attachment).where(Attachment.name == "Deleting rollback test") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.flush() + session.rollback() + assert StorageManager.get().get_object(file_id) is not None + + def test_relationship(self, fake_file, fake_content) -> None: + with Session(engine) as session: + article = Article(title="Great article!") + session.add(article) + article.attachments.append(Attachment(name="Banner", content=fake_file)) + session.commit() + article = session.execute( + select(Article).where(Article.title == "Great article!") + ).scalar_one() + attachment = article.attachments[0] + assert attachment.content.file.read() == fake_content.encode() + file_path = attachment.content.path + article.attachments.remove(attachment) + session.add(article) + session.commit() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(file_path) + + def test_relationship_rollback(self, fake_file, fake_content) -> None: + with Session(engine) as session: + article = Article(title="Awesome article about shark!") + session.add(article) + article.attachments.append(Attachment(name="Shark", content=fake_file)) + session.flush() + article = session.execute( + select(Article).where(Article.title == "Awesome article about shark!") + ).scalar_one() + attachment = article.attachments[0] + assert attachment.content.file.read() == fake_content.encode() + file_path = attachment.content.path + session.rollback() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(file_path) + + def test_relationship_cascade_delete(self, fake_file, fake_content) -> None: + with Session(engine) as session: + article = Article(title="Another Great article!") + session.add(article) + article.attachments.append( + Attachment(name="Another Banner", content=fake_file) + ) + session.commit() + article = session.execute( + 
select(Article).where(Article.title == "Another Great article!") + ).scalar_one() + attachment = article.attachments[0] + assert attachment.content.file.read() == fake_content.encode() + file_path = attachment.content.path + session.delete(article) + session.commit() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get_file(file_path) + + def test_relationship_cascade_delete_rollback( + self, fake_file, fake_content + ) -> None: + with Session(engine) as session: + article = Article(title="Another Great article for rollback!") + session.add(article) + article.attachments.append( + Attachment(name="Another Banner for rollback", content=fake_file) + ) + session.commit() + article = session.execute( + select(Article).where( + Article.title == "Another Great article for rollback!" + ) + ).scalar_one() + file_path = article.attachments[0].content.path + assert StorageManager.get_file(file_path) is not None + session.delete(article) + session.flush() + session.rollback() + assert StorageManager.get_file(file_path) is not None + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_size_validator.py b/tests/test_size_validator.py new file mode 100644 index 0000000..42e54b2 --- /dev/null +++ b/tests/test_size_validator.py @@ -0,0 +1,88 @@ +import tempfile + +import pytest +from sqlalchemy import Column, Integer, String +from sqlalchemy.orm import Session, declarative_base +from sqlalchemy_file.exceptions import SizeValidationError +from sqlalchemy_file.helpers import convert_size +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField +from sqlalchemy_file.validators import SizeValidator + +from tests.utils import get_test_container, get_test_engine + +engine = get_test_engine() +Base = declarative_base() + + +@pytest.fixture +def fake_huge_file(): + file = tempfile.NamedTemporaryFile() + 
file.write(b"\x00" * 6000) + file.seek(0) + return file + + +@pytest.fixture +def fake_valid_file(): + file = tempfile.NamedTemporaryFile() + file.write(b"\x00" * 3000) + file.seek(0) + return file + + +class Attachment(Base): + __tablename__ = "attachment" + + id = Column(Integer, autoincrement=True, primary_key=True) + name = Column(String(50), unique=True) + content = Column(FileField(validators=[SizeValidator("5K")])) + + def __repr__(self): + return "<Attachment: id %s; name: %s; content %s>" % ( + self.id, + self.name, + self.content, + ) # pragma: no cover + + +class TestSizeValidator: + def setup(self) -> None: + Base.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-size-validator")) + + def test_size_converter(self) -> None: + assert convert_size(100) == 100 + assert convert_size("3k") == 3 * 1000 + assert convert_size("4K") == 4 * 1000 + assert convert_size("2M") == 2 * 1000**2 + assert convert_size("3Ki") == 3 * 1024 + assert convert_size("3Mi") == 3 * 1024**2 + with pytest.raises(ValueError): + convert_size("25") + with pytest.raises(ValueError): + convert_size("25V") + + def test_size_validator_large_file(self, fake_huge_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Huge File", content=fake_huge_file)) + with pytest.raises(SizeValidationError): + session.flush() + + def test_size_validator_valid_size(self, fake_valid_file) -> None: + with Session(engine) as session: + attachment = Attachment(name="Valid File Size", content=fake_valid_file) + session.add(attachment) + session.commit() + session.refresh(attachment) + assert StorageManager.get_file(attachment.content.path) is not None + print(type(attachment.content.file)) + print(attachment.content.file) + assert attachment.content.file is not None + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + Base.metadata.drop_all(engine) diff --git a/tests/test_sqlmodel.py 
b/tests/test_sqlmodel.py new file mode 100644 index 0000000..4a67a64 --- /dev/null +++ b/tests/test_sqlmodel.py @@ -0,0 +1,167 @@ +import tempfile +from typing import Any + +import pytest +from libcloud.storage.types import ObjectDoesNotExistError +from sqlalchemy import Column, select +from sqlalchemy_file.file import File +from sqlalchemy_file.storage import StorageManager +from sqlalchemy_file.types import FileField +from sqlmodel import Field, Session, SQLModel + +from tests.utils import get_test_container, get_test_engine + +engine = get_test_engine() + + +@pytest.fixture +def fake_content(): + return "This is a fake file" + + +@pytest.fixture +def fake_file(fake_content): + file = tempfile.NamedTemporaryFile() + file.write(fake_content.encode()) + file.seek(0) + return file + + +class Attachment(SQLModel, table=True): + __tablename__ = "attachment" + + id: int = Field(None, primary_key=True) + name: str = Field(..., sa_column_kwargs=dict(unique=True)) + content: Any = Field(sa_column=Column(FileField)) + + +class TestSQLModel: + def setup(self) -> None: + SQLModel.metadata.create_all(engine) + StorageManager._clear() + StorageManager.add_storage("test", get_test_container("test-sqlmodel")) + + def test_create_from_string(self, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Create fake string", content=fake_content)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create fake string") + ).scalar_one() + assert attachment.content.saved + assert attachment.content.file.read() == fake_content.encode() + + def test_create_from_bytes(self, fake_content) -> None: + with Session(engine) as session: + session.add( + Attachment(name="Create Fake bytes", content=fake_content.encode()) + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create Fake bytes") + ).scalar_one() + assert attachment.content.saved + assert 
attachment.content.file.read() == fake_content.encode() + + def test_create_fromfile(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Create Fake file", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create Fake file") + ).scalar_one() + assert attachment.content.saved + assert attachment.content.file.read() == fake_content.encode() + + def test_file_is_created_when_flush(self, fake_file, fake_content) -> None: + with Session(engine) as session: + attachment = Attachment(name="Create Fake file 2", content=File(fake_file)) + session.add(attachment) + with pytest.raises(RuntimeError): + assert attachment.content.file is not None + session.flush() + assert attachment.content.file is not None + + def test_create_rollback(self, fake_file, fake_content) -> None: + with Session(engine) as session: + session.add(Attachment(name="Create rollback", content=fake_file)) + session.flush() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Create rollback") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.rollback() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(file_id) + + def test_edit_existing(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Editing test", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Editing test") + ).scalar_one() + old_file_id = attachment.content.file_id + attachment.content = b"New content" + session.add(attachment) + session.commit() + session.refresh(attachment) + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(old_file_id) + assert attachment.content.file.read() == b"New content" + + def test_edit_existing_rollback(self, fake_file) -> None: + 
with Session(engine) as session: + session.add( + Attachment(name="Editing test rollback", content=b"Initial content") + ) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Editing test rollback") + ).scalar_one() + old_file_id = attachment.content.file_id + attachment.content = b"New content" + session.add(attachment) + session.flush() + session.refresh(attachment) + new_file_id = attachment.content.file_id + session.rollback() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(new_file_id) + assert StorageManager.get().get_object(old_file_id) is not None + assert attachment.content.file.read() == b"Initial content" + + def test_delete_existing(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Deleting test", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Deleting test") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.commit() + with pytest.raises(ObjectDoesNotExistError): + StorageManager.get().get_object(file_id) + + def test_delete_existing_rollback(self, fake_file) -> None: + with Session(engine) as session: + session.add(Attachment(name="Deleting rollback test", content=fake_file)) + session.commit() + attachment = session.execute( + select(Attachment).where(Attachment.name == "Deleting rollback test") + ).scalar_one() + file_id = attachment.content.file_id + assert StorageManager.get().get_object(file_id) is not None + session.delete(attachment) + session.flush() + session.rollback() + assert StorageManager.get().get_object(file_id) is not None + + def teardown(self): + for obj in StorageManager.get().list_objects(): + obj.delete() + StorageManager.get().delete() + SQLModel.metadata.drop_all(engine) diff --git a/tests/test_storage_manager.py 
b/tests/test_storage_manager.py new file mode 100644 index 0000000..958f7af --- /dev/null +++ b/tests/test_storage_manager.py @@ -0,0 +1,39 @@ +import pytest +from sqlalchemy_file.storage import StorageManager + +from tests.utils import get_dummy_container + + +class TestStorageManager: + def setup(self) -> None: + StorageManager._clear() + + def test_first_configured_is_default(self) -> None: + StorageManager.add_storage("first", get_dummy_container("first")) + StorageManager.add_storage("second", get_dummy_container("second")) + assert StorageManager.get_default() == "first" + + def test_changing_default_storage_works(self) -> None: + StorageManager.add_storage("first", get_dummy_container("first")) + StorageManager.add_storage("second", get_dummy_container("second")) + StorageManager.set_default("second") + assert StorageManager.get_default() == "second" + + def test_no_storage_is_detected(self) -> None: + with pytest.raises(RuntimeError): + StorageManager.get_default() + with pytest.raises(RuntimeError): + StorageManager.get() + + def test_prevent_non_existing_default(self) -> None: + with pytest.raises(RuntimeError): + StorageManager.set_default("does_not_exists") + + def test_prevent_non_existing(self) -> None: + with pytest.raises(RuntimeError): + StorageManager.get("does_not_exists") + + def test_unique_storage_name(self) -> None: + StorageManager.add_storage("first", get_dummy_container("first")) + with pytest.raises(RuntimeError): + StorageManager.add_storage("first", get_dummy_container("second")) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..675a7fd --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,46 @@ +import os + +from libcloud.storage.base import Container, StorageDriver +from libcloud.storage.drivers.dummy import DummyStorageDriver +from libcloud.storage.drivers.local import LocalStorageDriver +from libcloud.storage.drivers.minio import MinIOStorageDriver +from libcloud.storage.types import ContainerDoesNotExistError 
+from sqlalchemy import create_engine +from sqlalchemy.engine import Engine + + +def get_test_engine() -> Engine: + return create_engine( + os.environ.get("ENGINE", "sqlite:///:memory:?check_same_thread=False") + ) + + +def get_or_create_container(driver: StorageDriver, name: str) -> Container: + try: + return driver.get_container(name) + except ContainerDoesNotExistError: + return driver.create_container(name) + + +def get_dummy_container(name: str) -> Container: + return get_or_create_container(DummyStorageDriver("xxx", "xxx"), name) + + +def get_test_container(name: str) -> Container: + provider = os.environ.get("STORAGE_PROVIDER", "LOCAL") + if provider == "MINIO": + key = os.environ.get("MINIO_KEY", "minioadmin") + secret = os.environ.get("MINIO_SECRET", "minioadmin") + host = os.environ.get("MINIO_HOST", "127.0.0.1") + port = int(os.environ.get("MINIO_PORT", "9000")) + secure = os.environ.get("MINIO_SECURE", "False").lower() == "true" + return get_or_create_container( + MinIOStorageDriver( + key=key, secret=secret, host=host, port=port, secure=secure + ), + name, + ) + else: + dir_path = os.environ.get("LOCAL_PATH", "/tmp/storage") + os.makedirs(dir_path, 0o777, exist_ok=True) + return get_or_create_container(LocalStorageDriver(dir_path), name)