diff --git a/.github/workflows/cicd.yaml b/.github/workflows/cicd.yaml new file mode 100644 index 000000000..e07d65260 --- /dev/null +++ b/.github/workflows/cicd.yaml @@ -0,0 +1,78 @@ +name: arturo-stac-api +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + test: + runs-on: ubuntu-latest + timeout-minutes: 10 + + services: + db_service: + image: kartoza/postgis:latest + env: + POSTGRES_USER: username + POSTGRES_PASS: password + POSTGRES_DBNAME: postgis + POSTGRES_HOST: localhost + POSTGRES_PORT: 5432 + ALLOW_IP_RANGE: 0.0.0.0/0 + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + + steps: + - name: Check out repository code + uses: actions/checkout@v2 + + # Setup Python (faster than using Python container) + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.8" + + - name: Install pipenv + run: | + python -m pip install --upgrade pipenv wheel + + - id: cache-pipenv + uses: actions/cache@v1 + with: + path: ~/.local/share/virtualenvs + key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }} + + - name: Install dependencies + if: steps.cache-pipenv.outputs.cache-hit != 'true' + run: | + pipenv install --deploy --dev + + - name: Run migration + run: | + pipenv run alembic upgrade head + env: + POSTGRES_USER: username + POSTGRES_PASS: password + POSTGRES_DBNAME: postgis + POSTGRES_HOST: localhost + POSTGRES_PORT: 5432 + + - name: Run test suite + run: | + pipenv run pytest -svvv + env: + ENVIRONMENT: testing + POSTGRES_USER: username + POSTGRES_PASS: password + POSTGRES_DBNAME: postgis + POSTGRES_HOST_READER: localhost + POSTGRES_HOST_WRITER: localhost + POSTGRES_PORT: 5432 \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..5e03e8674 --- /dev/null +++ b/.gitignore @@ -0,0 +1,121 @@ +PIP_EXTRA_INDEX_URL +*.txt +!tests/resources/*.jpg +**.pyc +**.log +*.mat +target/* +src/local/* +src/local-test/* +*.iml +.idea/ +model/ +.DS_Store +#config.yaml +**.save +*.jpg +**.save.* +**.bak +.DS_Store +.mvn/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# user specific overrides +tests/tests.ini +tests/logging.ini + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# dotenv +.env + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# skaffold temporary build/deploy files +build.out \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..db791fdcf --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,6 @@ +repos: + - repo: https://github.com/python/black + rev: stable + hooks: + - id: black + language_version: python3.8 \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..75febec4c --- /dev/null +++ b/Dockerfile @@ -0,0 +1,31 @@ +FROM python:3.8-slim + +# Any python libraries that require system libraries to be installed will likely +# need the following packages in order to build +RUN apt-get update && apt-get install -y \ + build-essential \ + libffi-dev \ + libssl-dev \ + git + +RUN pip install pipenv +ENV PIPENV_NOSPIN=true +ENV PIPENV_HIDE_EMOJIS=true + +ARG install_dev_dependencies=true + +WORKDIR /app + +COPY Pipfile Pipfile.lock ./ +RUN pipenv install --deploy --ignore-pipfile ${install_dev_dependencies:+--dev} + +COPY . ./ + +ENV APP_HOST=0.0.0.0 +ENV APP_PORT=80 + +ENV RELOAD='' + + +ENTRYPOINT ["pipenv", "run"] +CMD uvicorn stac_api.app:app --host=${APP_HOST} --port=${APP_PORT} ${RELOAD:+--reload} diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..7427f5ac1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Arturo AI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..3dd742080 --- /dev/null +++ b/Makefile @@ -0,0 +1,25 @@ +#!make +APP_HOST ?= 0.0.0.0 +APP_PORT ?= 8080 +EXTERNAL_APP_PORT ?= ${APP_PORT} +run_docker = docker run -it --rm \ + -p ${EXTERNAL_APP_PORT}:${APP_PORT} \ + -v $(shell pwd):/app \ + --env APP_HOST=${APP_HOST} \ + --env APP_PORT=${APP_PORT} \ + --env POSTGRES_USER=username \ + --env POSTGRES_PASS=password \ + --env POSTGRES_DBNAME=postgis \ + --env POSTGRES_HOST_READER=host.docker.internal \ + --env POSTGRES_HOST_WRITER=host.docker.internal \ + --env POSTGRES_PORT=5432 \ + --env ENVIRONMENT=development \ + arturo-stac-api_app + +.PHONY: docker-shell +docker-shell: + $(run_docker) /bin/bash + +.PHONY: test +test: + $(run_docker) pytest \ No newline at end of file diff --git a/Pipfile b/Pipfile new file mode 100644 index 000000000..4a3fa9b77 --- /dev/null +++ b/Pipfile @@ -0,0 +1,24 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = "true" + +[packages] +uvicorn = "*" +fastapi = {extras = ["all"],version = "*"} +alembic = "*" +psycopg2-binary = "*" +shapely = "*" +sqlalchemy = "*" +geoalchemy2 = "<0.8.0" +sqlakeyset = "*" +stac-pydantic = "*" + +[dev-packages] +pytest = "*" +pytest-cov = "*" +pytest-asyncio = "*" +requests = "*" + +[requires] +python_version = "3.8" \ No newline at end of file diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 000000000..737020444 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,713 @@ +{ + "_meta": { + "hash": { + "sha256": "ed6a1da91f611870130070913e27eb5d83f7c95f2919e5aa187e4b1162f6e89c" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.8" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": "true" + } + ] + }, + "default": { + "aiofiles": { + "hashes": [ + "sha256:377fdf7815cc611870c59cbd07b68b180841d2a2b79812d8c218be02448c2acb", + "sha256:98e6bcfd1b50f97db4980e182ddd509b7cc35909e903a8fe50d8849e02d815af" + ], + "version": "==0.5.0" + }, + "alembic": { + "hashes": [ + "sha256:035ab00497217628bf5d0be82d664d8713ab13d37b630084da8e1f98facf4dbf" + ], + "index": "pypi", + "version": "==1.4.2" + }, + "aniso8601": { + "hashes": [ + "sha256:513d2b6637b7853806ae79ffaca6f3e8754bdd547048f5ccc1420aec4b714f1e", + "sha256:d10a4bf949f619f719b227ef5386e31f49a2b6d453004b21f02661ccc8670c7b" + ], + "version": "==7.0.0" + }, + "async-exit-stack": { + "hashes": [ + "sha256:24de1ad6d0ff27be97c89d6709fa49bf20db179eaf1f4d2e6e9b4409b80e747d", + "sha256:9b43b17683b3438f428ef3bbec20689f5abbb052aa4b564c643397330adfaa99" + ], + "version": "==1.0.1" + }, + "async-generator": { + "hashes": [ + "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", + "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144" + ], + "version": "==1.10" + }, + "certifi": { + "hashes": [ + "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", + "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + ], + "version": "==2020.6.20" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 
3.4'", + "version": "==7.1.2" + }, + "dnspython": { + "hashes": [ + "sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01", + "sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" + }, + "email-validator": { + "hashes": [ + "sha256:5f246ae8d81ce3000eade06595b7bb55a4cf350d559e890182a1466a21f25067", + "sha256:63094045c3e802c3d3d575b18b004a531c36243ca8d1cec785ff6bfcb04185bb" + ], + "version": "==1.1.1" + }, + "fastapi": { + "extras": [ + "all" + ], + "hashes": [ + "sha256:bfea20cc164885af99bad4bc680a99fe3b75d8d43b278ad51f1501d97cf8f762", + "sha256:cdd953e806d5c6bee879178d4e6c7b2ebcee219ea252a39c1c98ad8c7b798235" + ], + "index": "pypi", + "version": "==0.58.0" + }, + "geoalchemy2": { + "hashes": [ + "sha256:379b0fc4ca5f9b5ef625719f47e22c9b8abd347aa78344e85f99d32594cfccd4", + "sha256:ec2a6e9919b522631803ac5922e88b701081da7e5d56a68f10ff263f6592d552" + ], + "index": "pypi", + "version": "==0.7.0" + }, + "geojson": { + "hashes": [ + "sha256:6e4bb7ace4226a45d9c8c8b1348b3fc43540658359f93c3f7e03efa9f15f658a", + "sha256:ccbd13368dd728f4e4f13ffe6aaf725b6e802c692ba0dde628be475040c534ba" + ], + "version": "==2.5.0" + }, + "geojson-pydantic": { + "hashes": [ + "sha256:f15f0af0a1ed1749cb94d202745759be05eb614de7456bc323b284744aaeb112" + ], + "markers": "python_version >= '3.6'", + "version": "==0.1.0" + }, + "graphene": { + "hashes": [ + "sha256:09165f03e1591b76bf57b133482db9be6dac72c74b0a628d3c93182af9c5a896", + "sha256:2cbe6d4ef15cfc7b7805e0760a0e5b80747161ce1b0f990dfdc0d2cf497c12f9" + ], + "version": "==2.1.8" + }, + "graphql-core": { + "hashes": [ + "sha256:44c9bac4514e5e30c5a595fac8e3c76c1975cae14db215e8174c7fe995825bad", + "sha256:aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746" + ], + "version": "==2.3.2" + }, + "graphql-relay": { + "hashes": [ + "sha256:870b6b5304123a38a0b215a79eace021acce5a466bf40cd39fa18cb8528afabb", + "sha256:ac514cb86db9a43014d7e73511d521137ac12cf0101b2eaa5f0a3da2e10d913d" + ], + "version": "==2.0.1" + }, + "h11": { + "hashes": [ + "sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1", + "sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1" + ], + "version": "==0.9.0" + }, + "httptools": { + "hashes": [ + "sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be", + "sha256:3592e854424ec94bd17dc3e0c96a64e459ec4147e6d53c0a42d0ebcef9cb9c5d", + "sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce", + "sha256:56b6393c6ac7abe632f2294da53f30d279130a92e8ae39d8d14ee2e1b05ad1f2", + "sha256:86c6acd66765a934e8730bf0e9dfaac6fdcf2a4334212bd4a0a1c78f16475ca6", + "sha256:96da81e1992be8ac2fd5597bf0283d832287e20cb3cfde8996d2b00356d4e17f", + "sha256:96eb359252aeed57ea5c7b3d79839aaa0382c9d3149f7d24dd7172b1bcecb009", + "sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce", + "sha256:ac0aa11e99454b6a66989aa2d44bca41d4e0f968e395a0a8f164b401fefe359a", + "sha256:bc3114b9edbca5a1eb7ae7db698c669eb53eb8afbbebdde116c174925260849c", + "sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4", + "sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437" + ], + "markers": "sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", + "version": "==0.1.1" + }, + "idna": { + "hashes": [ + "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", + 
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9" + }, + "itsdangerous": { + "hashes": [ + "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19", + "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749" + ], + "version": "==1.1.0" + }, + "jinja2": { + "hashes": [ + "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0", + "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035" + ], + "version": "==2.11.2" + }, + "mako": { + "hashes": [ + "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27", + "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.1.3" + }, + "markupsafe": { + "hashes": [ + "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", + "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", + "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", + "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", + "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42", + "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", + "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", + "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", + "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", + "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", + "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", + "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b", + "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", + "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15", + "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", + "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", + "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", + "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", + "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", + "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", + "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", + "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", + "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", + "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", + "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", + "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", + "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", + "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", + "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", + "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", + "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2", + "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7", + "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be" + ], + "markers": "python_version >= '2.7' and 
python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.1.1" + }, + "orjson": { + "hashes": [ + "sha256:003c96a677aa0b64a3cdd60f71b442ca8be09305a4d6b8ddafe136b4e9a89b8f", + "sha256:07fe57f80a771e60f02f945a6dbb036e6aab835f6f9779bc4aa192d858fb2381", + "sha256:0c72bfb36a322580cf1e212f7d040f5e38eba66ea3afea909c531cf83927724e", + "sha256:2044683c455a665d7e0cff988fbc1fea0ec283cf0c02029161fb1510fd7b6a6a", + "sha256:34ffec09b427488b874d108659c5fc30dbd62789808917fd28a79ca41a7cc484", + "sha256:3670a379d4ba3a222d0ee22ecb1a2e3c643b982ecc82e22a42c3a9f9fd24193b", + "sha256:3f4414481537e1882f96f640f7932919b09dccef3bb5ae1312a074806c7e24eb", + "sha256:409ed91d9ab7f7bdb4d50768b9697051b0891d1a7e9c04818b4a00bf2d6bddd7", + "sha256:6d3d786735e126a40905b0910528f02ef7087c78de825b6aab8d39c08500b4e2", + "sha256:6ff421ae71b4b148b8c859179ae55287513b546188ef6d8925048a2de5f85319", + "sha256:7ef3aec433ef0443d458b6045bebc57d6692094dcf778b9c22882e9dc210063e", + "sha256:81e82fb560ba1ff78e70d217e62c4381c2416dcdb2c440b830121226f8cf00da", + "sha256:8de85c2c814cbfa595b60655856d91c49678c8d8efe97101345824aa794a0f6e", + "sha256:c49eb54e3cfc4c0b14ca566b8ec2f3bc6f5860b0f60145e6003a40a14227554a", + "sha256:e6f150bb1bd89e324d4b1768bdaca90536f0a42e32b7ab5fe31aa4b23ee779ef" + ], + "version": "==3.1.1" + }, + "promise": { + "hashes": [ + "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0" + ], + "version": "==2.3" + }, + "psycopg2-binary": { + "hashes": [ + "sha256:008da3ab51adc70a5f1cfbbe5db3a22607ab030eb44bcecf517ad11a0c2b3cac", + "sha256:07cf82c870ec2d2ce94d18e70c13323c89f2f2a2628cbf1feee700630be2519a", + "sha256:08507efbe532029adee21b8d4c999170a83760d38249936038bd0602327029b5", + "sha256:107d9be3b614e52a192719c6bf32e8813030020ea1d1215daa86ded9a24d8b04", + "sha256:17a0ea0b0eabf07035e5e0d520dabc7950aeb15a17c6d36128ba99b2721b25b1", + "sha256:3286541b9d85a340ee4ed42732d15fc1bb441dc500c97243a768154ab8505bb5", + "sha256:3939cf75fc89c5e9ed836e228c4a63604dff95ad19aed2bbf71d5d04c15ed5ce", + "sha256:40abc319f7f26c042a11658bf3dd3b0b3bceccf883ec1c565d5c909a90204434", + "sha256:51f7823f1b087d2020d8e8c9e6687473d3d239ba9afc162d9b2ab6e80b53f9f9", + "sha256:6bb2dd006a46a4a4ce95201f836194eb6a1e863f69ee5bab506673e0ca767057", + "sha256:702f09d8f77dc4794651f650828791af82f7c2efd8c91ae79e3d9fe4bb7d4c98", + "sha256:7036ccf715925251fac969f4da9ad37e4b7e211b1e920860148a10c0de963522", + "sha256:7b832d76cc65c092abd9505cc670c4e3421fd136fb6ea5b94efbe4c146572505", + "sha256:8f74e631b67482d504d7e9cf364071fc5d54c28e79a093ff402d5f8f81e23bfa", + "sha256:930315ac53dc65cbf52ab6b6d27422611f5fb461d763c531db229c7e1af6c0b3", + "sha256:96d3038f5bd061401996614f65d27a4ecb62d843eb4f48e212e6d129171a721f", + "sha256:a20299ee0ea2f9cca494396ac472d6e636745652a64a418b39522c120fd0a0a4", + "sha256:a34826d6465c2e2bbe9d0605f944f19d2480589f89863ed5f091943be27c9de4", + "sha256:a69970ee896e21db4c57e398646af9edc71c003bc52a3cc77fb150240fefd266", + "sha256:b9a8b391c2b0321e0cd7ec6b4cfcc3dd6349347bd1207d48bcb752aa6c553a66", + "sha256:ba13346ff6d3eb2dca0b6fa0d8a9d999eff3dcd9b55f3a890f12b0b6362b2b38", + "sha256:bb0608694a91db1e230b4a314e8ed00ad07ed0c518f9a69b83af2717e31291a3", + "sha256:c8830b7d5f16fd79d39b21e3d94f247219036b29b30c8270314c46bf8b732389", + "sha256:cac918cd7c4c498a60f5d2a61d4f0a6091c2c9490d81bc805c963444032d0dab", + "sha256:cc30cb900f42c8a246e2cb76539d9726f407330bc244ca7729c41a44e8d807fb", + "sha256:ccdc6a87f32b491129ada4b87a43b1895cf2c20fdb7f98ad979647506ffc41b6", + "sha256:d1a8b01f6a964fec702d6b6dac1f91f2b9f9fe41b310cbb16c7ef1fac82df06d", + 
"sha256:e004db88e5a75e5fdab1620fb9f90c9598c2a195a594225ac4ed2a6f1c23e162", + "sha256:eb2f43ae3037f1ef5e19339c41cf56947021ac892f668765cd65f8ab9814192e", + "sha256:fa466306fcf6b39b8a61d003123d442b23707d635a5cb05ac4e1b62cc79105cd" + ], + "index": "pypi", + "version": "==2.8.5" + }, + "pydantic": { + "hashes": [ + "sha256:0a1cdf24e567d42dc762d3fed399bd211a13db2e8462af9dfa93b34c41648efb", + "sha256:2007eb062ed0e57875ce8ead12760a6e44bf5836e6a1a7ea81d71eeecf3ede0f", + "sha256:20a15a303ce1e4d831b4e79c17a4a29cb6740b12524f5bba3ea363bff65732bc", + "sha256:2a6904e9f18dea58f76f16b95cba6a2f20b72d787abd84ecd67ebc526e61dce6", + "sha256:3714a4056f5bdbecf3a41e0706ec9b228c9513eee2ad884dc2c568c4dfa540e9", + "sha256:473101121b1bd454c8effc9fe66d54812fdc128184d9015c5aaa0d4e58a6d338", + "sha256:68dece67bff2b3a5cc188258e46b49f676a722304f1c6148ae08e9291e284d98", + "sha256:70f27d2f0268f490fe3de0a9b6fca7b7492b8fd6623f9fecd25b221ebee385e3", + "sha256:8433dbb87246c0f562af75d00fa80155b74e4f6924b0db6a2078a3cd2f11c6c4", + "sha256:8be325fc9da897029ee48d1b5e40df817d97fe969f3ac3fd2434ba7e198c55d5", + "sha256:93b9f265329d9827f39f0fca68f5d72cc8321881cdc519a1304fa73b9f8a75bd", + "sha256:9be755919258d5d168aeffbe913ed6e8bd562e018df7724b68cabdee3371e331", + "sha256:ab863853cb502480b118187d670f753be65ec144e1654924bec33d63bc8b3ce2", + "sha256:b96ce81c4b5ca62ab81181212edfd057beaa41411cd9700fbcb48a6ba6564b4e", + "sha256:da8099fca5ee339d5572cfa8af12cf0856ae993406f0b1eb9bb38c8a660e7416", + "sha256:e2c753d355126ddd1eefeb167fa61c7037ecd30b98e7ebecdc0d1da463b4ea09", + "sha256:f0018613c7a0d19df3240c2a913849786f21b6539b9f23d85ce4067489dfacfa" + ], + "markers": "python_version >= '3.6'", + "version": "==1.5.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", + "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.8.1" + }, + "python-editor": { + "hashes": [ + "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d", + "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b", + "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8", + "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77", + "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522" + ], + "version": "==1.0.4" + }, + "python-multipart": { + "hashes": [ + "sha256:f7bb5f611fc600d15fa47b3974c8aa16e93724513b49b5f95c81e6624c83fa43" + ], + "version": "==0.0.5" + }, + "pyyaml": { + "hashes": [ + "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97", + "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76", + "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2", + "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648", + "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf", + "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f", + "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2", + "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee", + "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d", + "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c", + "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a" + ], + "version": "==5.3.1" + }, + "requests": { + "hashes": [ + 
"sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", + "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" + ], + "version": "==2.24.0" + }, + "rx": { + "hashes": [ + "sha256:13a1d8d9e252625c173dc795471e614eadfe1cf40ffc684e08b8fff0d9748c23", + "sha256:7357592bc7e881a95e0c2013b73326f704953301ab551fbc8133a6fadab84105" + ], + "version": "==1.6.1" + }, + "shapely": { + "hashes": [ + "sha256:11090bd5b5f11d54e1924a11198226971dab6f392c2e5a3c74514857f764b971", + "sha256:1af407c58e7898a511ad01dc6e7c2099493071d939340553686b27513db6478e", + "sha256:2154b9f25c5f13785cb05ce80b2c86e542bc69671193743f29c9f4c791c35db3", + "sha256:234c5424d61d8b263d6d20045f5f32437819627ca57c1ea0c08368013b49824b", + "sha256:29be7767a32df19e2186288cee63e539b386a35139524dc22eeceb244d0b092b", + "sha256:2a2d37105c1d6d936f829de6c1c4ec8d43484d7b8bae8493bdd4267140dce650", + "sha256:3793b09cbd86fe297193b365cbaf58b2f7d1ddeb273213185b2ddbab360e54ae", + "sha256:4acbd566544c33bbc58c7dd264638ff3b91a57d9b162693c37520ea60d13668d", + "sha256:50f96eb9993b6d841aac0addb84ea5f9da81c3fa97e1ec67c11964c8bb4fa0a5", + "sha256:640e8a82b5f69ccd14e7520dd66d1247cf362096586e663ef9b8098cc0cb272b", + "sha256:7554b1acd64a34d78189ab2f691bac967e0d9b38a4f345044552f9dcf3f92149", + "sha256:7e9b01e89712fd988f931721fa36298e06a02eedf87fe7a7fd704d08f74c00f1", + "sha256:9c62a9f7adceaa3110f2ec359c70dddd1640191609e91029e4d307e63fc8a5af", + "sha256:a6c07b3b87455d107b0e4097889e9aba80a0812abf32a322a133af819b85d68a", + "sha256:ae9a2da2b30c0b42029337854f78c71c28d285d254efd5f3be3700d997bfd18e", + "sha256:b845a97f8366cc4aca197c3b04304cc301d9250518123155732da6a0e0575b49", + "sha256:cc0fb1851b59473d2fa2f257f1e35740875af3f402c4575b4115028234e6f2eb", + "sha256:e21a9fe1a416463ff11ae037766fe410526c95700b9e545372475d2361cc951e", + "sha256:f7eb83fb36755edcbeb76fb367104efdf980307536c38ef610cb2e1a321defe0" + ], + "index": "pypi", + "version": "==1.7.0" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.15.0" + }, + "sqlakeyset": { + "hashes": [ + "sha256:064355bfd30515012dea96dfec4e7fddab985ffd5bb57f517b60c47552a041ce", + "sha256:1aa250f0b124dd1a34e951542bcd6d1b858c4677846b3ca21aa1dabf54b8431f" + ], + "index": "pypi", + "version": "==0.1.1579837191" + }, + "sqlalchemy": { + "hashes": [ + "sha256:128bc917ed20d78143a45024455ff0aed7d3b96772eba13d5dbaf9cc57e5c41b", + "sha256:156a27548ba4e1fed944ff9fcdc150633e61d350d673ae7baaf6c25c04ac1f71", + "sha256:27e2efc8f77661c9af2681755974205e7462f1ae126f498f4fe12a8b24761d15", + "sha256:2a12f8be25b9ea3d1d5b165202181f2b7da4b3395289000284e5bb86154ce87c", + "sha256:31c043d5211aa0e0773821fcc318eb5cbe2ec916dfbc4c6eea0c5188971988eb", + "sha256:65eb3b03229f684af0cf0ad3bcc771970c1260a82a791a8d07bffb63d8c95bcc", + "sha256:6cd157ce74a911325e164441ff2d9b4e244659a25b3146310518d83202f15f7a", + "sha256:703c002277f0fbc3c04d0ae4989a174753a7554b2963c584ce2ec0cddcf2bc53", + "sha256:869bbb637de58ab0a912b7f20e9192132f9fbc47fc6b5111cd1e0f6cdf5cf9b0", + "sha256:8a0e0cd21da047ea10267c37caf12add400a92f0620c8bc09e4a6531a765d6d7", + "sha256:8d01e949a5d22e5c4800d59b50617c56125fc187fbeb8fa423e99858546de616", + "sha256:925b4fe5e7c03ed76912b75a9a41dfd682d59c0be43bce88d3b27f7f5ba028fb", + "sha256:9cb1819008f0225a7c066cac8bb0cf90847b2c4a6eb9ebb7431dbd00c56c06c5", + 
"sha256:a87d496884f40c94c85a647c385f4fd5887941d2609f71043e2b73f2436d9c65", + "sha256:a9030cd30caf848a13a192c5e45367e3c6f363726569a56e75dc1151ee26d859", + "sha256:a9e75e49a0f1583eee0ce93270232b8e7bb4b1edc89cc70b07600d525aef4f43", + "sha256:b50f45d0e82b4562f59f0e0ca511f65e412f2a97d790eea5f60e34e5f1aabc9a", + "sha256:b7878e59ec31f12d54b3797689402ee3b5cfcb5598f2ebf26491732758751908", + "sha256:ce1ddaadee913543ff0154021d31b134551f63428065168e756d90bdc4c686f5", + "sha256:ce2646e4c0807f3461be0653502bb48c6e91a5171d6e450367082c79e12868bf", + "sha256:ce6c3d18b2a8ce364013d47b9cad71db815df31d55918403f8db7d890c9d07ae", + "sha256:e4e2664232005bd306f878b0f167a31f944a07c4de0152c444f8c61bbe3cfb38", + "sha256:e8aa395482728de8bdcca9cc0faf3765ab483e81e01923aaa736b42f0294f570", + "sha256:eb4fcf7105bf071c71068c6eee47499ab8d4b8f5a11fc35147c934f0faa60f23", + "sha256:ed375a79f06cad285166e5be74745df1ed6845c5624aafadec4b7a29c25866ef", + "sha256:f35248f7e0d63b234a109dd72fbfb4b5cb6cb6840b221d0df0ecbf54ab087654", + "sha256:f502ef245c492b391e0e23e94cba030ab91722dcc56963c85bfd7f3441ea2bbe", + "sha256:fe01bac7226499aedf472c62fa3b85b2c619365f3f14dd222ffe4f3aa91e5f98" + ], + "index": "pypi", + "version": "==1.3.17" + }, + "stac-pydantic": { + "hashes": [ + "sha256:37a5fe9b962377fde357c3abba12c317f41416d2cf87038d223db0d4cf6df62f", + "sha256:9dd29116e4a00a51813417288845b28c169d32eb25be5e6f3cb9fbdc88959db9" + ], + "index": "pypi", + "version": "==1.0.3" + }, + "starlette": { + "hashes": [ + "sha256:04fe51d86fd9a594d9b71356ed322ccde5c9b448fc716ac74155e5821a922f8d", + "sha256:0fb4b38d22945b46acb880fedee7ee143fd6c0542992501be8c45c0ed737dd1a" + ], + "markers": "python_version >= '3.6'", + "version": "==0.13.4" + }, + "ujson": { + "hashes": [ + "sha256:019a17e7162f26e264f1645bb41630f7103d178c092ea4bb8f3b16126c3ea210", + "sha256:0379ffc7484b862a292e924c15ad5f1c5306d4271e2efd162144812afb08ff97", + "sha256:0959a5b569e192459b492b007e3fd63d8f4b4bcb4f69dcddca850a9b9dfe3e7a", + "sha256:0e2352b60c4ac4fc75b723435faf36ef5e7f3bfb988adb4d589b5e0e6e1d90aa", + "sha256:0f33359908df32033195bfdd59ba2bfb90a23cb280ef9a0ba11e5013a53d7fd9", + "sha256:154f778f0b028390067aaedce8399730d4f528a16a1c214fe4eeb9c4e4f51810", + "sha256:3bd791d17a175c1c6566aeaec1755b58e3f021fe9bb62f10f02b656b299199f5", + "sha256:634c206f4fb3be7e4523768c636d2dd41cb9c7130e2d219ef8305b8fb6f4838e", + "sha256:670018d4ab4b0755a7234a9f4791723abcd0506c0eed33b2ed50579c4aff31f2", + "sha256:9c68557da3e3ad57e0105aceba0cce5f8f7cd07d207c3860e59c0b3044532830", + "sha256:a32f2def62b10e8a19084d17d40363c4da1ac5f52d300a9e99d7efb49fe5f34a", + "sha256:bea2958c7b5bf4f191f0def751b6f7c8b208edb5f7277e21776329f2ca042385", + "sha256:c04d253fec814657fd9f150ef2333dbd0bc6f46208355aa753a29e0696b7fa7e", + "sha256:c841a6450d64c24c64cbcca429bab22cdb6daef5eaddfdfebe798a5e9e5aff4c", + "sha256:e0199849d61cc6418f94d52a314c6a27524d65e82174d2a043fb718f73d1520d", + "sha256:f40bb0d0cb534aad3e24884cf864bda7a71eb5984bd1da61d1711bbfb3be2c38", + "sha256:f854702a9aff3a445f4a0b715d240f2a3d84014d8ae8aad05a982c7ffab12525" + ], + "version": "==3.0.0" + }, + "urllib3": { + "hashes": [ + "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", + "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.25.9" + }, + "uvicorn": { + "hashes": [ + "sha256:50577d599775dac2301bac8bd5b540d19a9560144143c5bdab13cba92783b6e7", + 
"sha256:596eaa8645b6dbc24d6610e335f8ddf5f925b4c4b86fdc7146abb0bf0da65d17" + ], + "index": "pypi", + "version": "==0.11.5" + }, + "uvloop": { + "hashes": [ + "sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd", + "sha256:123ac9c0c7dd71464f58f1b4ee0bbd81285d96cdda8bc3519281b8973e3a461e", + "sha256:4315d2ec3ca393dd5bc0b0089d23101276778c304d42faff5dc4579cb6caef09", + "sha256:4544dcf77d74f3a84f03dd6278174575c44c67d7165d4c42c71db3fdc3860726", + "sha256:afd5513c0ae414ec71d24f6f123614a80f3d27ca655a4fcf6cabe50994cc1891", + "sha256:b4f591aa4b3fa7f32fb51e2ee9fea1b495eb75b0b3c8d0ca52514ad675ae63f7", + "sha256:bcac356d62edd330080aed082e78d4b580ff260a677508718f88016333e2c9c5", + "sha256:e7514d7a48c063226b7d06617cbb12a14278d4323a065a8d46a7962686ce2e95", + "sha256:f07909cd9fc08c52d294b1570bba92186181ca01fe3dc9ffba68955273dd7362" + ], + "markers": "sys_platform != 'win32' and sys_platform != 'cygwin' and platform_python_implementation != 'PyPy'", + "version": "==0.14.0" + }, + "websockets": { + "hashes": [ + "sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5", + "sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5", + "sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308", + "sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb", + "sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a", + "sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c", + "sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170", + "sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422", + "sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8", + "sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485", + "sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f", + "sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8", + "sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc", + "sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779", + "sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989", + "sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1", + "sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092", + "sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824", + "sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d", + "sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55", + "sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36", + "sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==8.1" + } + }, + "develop": { + "attrs": { + "hashes": [ + "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c", + "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==19.3.0" + }, + "certifi": { + "hashes": [ + "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3", + "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41" + ], + "version": "==2020.6.20" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + 
"coverage": { + "hashes": [ + "sha256:00f1d23f4336efc3b311ed0d807feb45098fc86dee1ca13b3d6768cdab187c8a", + "sha256:01333e1bd22c59713ba8a79f088b3955946e293114479bbfc2e37d522be03355", + "sha256:0cb4be7e784dcdc050fc58ef05b71aa8e89b7e6636b99967fadbdba694cf2b65", + "sha256:0e61d9803d5851849c24f78227939c701ced6704f337cad0a91e0972c51c1ee7", + "sha256:1601e480b9b99697a570cea7ef749e88123c04b92d84cedaa01e117436b4a0a9", + "sha256:2742c7515b9eb368718cd091bad1a1b44135cc72468c731302b3d641895b83d1", + "sha256:2d27a3f742c98e5c6b461ee6ef7287400a1956c11421eb574d843d9ec1f772f0", + "sha256:402e1744733df483b93abbf209283898e9f0d67470707e3c7516d84f48524f55", + "sha256:5c542d1e62eece33c306d66fe0a5c4f7f7b3c08fecc46ead86d7916684b36d6c", + "sha256:5f2294dbf7875b991c381e3d5af2bcc3494d836affa52b809c91697449d0eda6", + "sha256:6402bd2fdedabbdb63a316308142597534ea8e1895f4e7d8bf7476c5e8751fef", + "sha256:66460ab1599d3cf894bb6baee8c684788819b71a5dc1e8fa2ecc152e5d752019", + "sha256:782caea581a6e9ff75eccda79287daefd1d2631cc09d642b6ee2d6da21fc0a4e", + "sha256:79a3cfd6346ce6c13145731d39db47b7a7b859c0272f02cdb89a3bdcbae233a0", + "sha256:7a5bdad4edec57b5fb8dae7d3ee58622d626fd3a0be0dfceda162a7035885ecf", + "sha256:8fa0cbc7ecad630e5b0f4f35b0f6ad419246b02bc750de7ac66db92667996d24", + "sha256:a027ef0492ede1e03a8054e3c37b8def89a1e3c471482e9f046906ba4f2aafd2", + "sha256:a3f3654d5734a3ece152636aad89f58afc9213c6520062db3978239db122f03c", + "sha256:a82b92b04a23d3c8a581fc049228bafde988abacba397d57ce95fe95e0338ab4", + "sha256:acf3763ed01af8410fc36afea23707d4ea58ba7e86a8ee915dfb9ceff9ef69d0", + "sha256:adeb4c5b608574a3d647011af36f7586811a2c1197c861aedb548dd2453b41cd", + "sha256:b83835506dfc185a319031cf853fa4bb1b3974b1f913f5bb1a0f3d98bdcded04", + "sha256:bb28a7245de68bf29f6fb199545d072d1036a1917dca17a1e75bbb919e14ee8e", + "sha256:bf9cb9a9fd8891e7efd2d44deb24b86d647394b9705b744ff6f8261e6f29a730", + "sha256:c317eaf5ff46a34305b202e73404f55f7389ef834b8dbf4da09b9b9b37f76dd2", + "sha256:dbe8c6ae7534b5b024296464f387d57c13caa942f6d8e6e0346f27e509f0f768", + "sha256:de807ae933cfb7f0c7d9d981a053772452217df2bf38e7e6267c9cbf9545a796", + "sha256:dead2ddede4c7ba6cb3a721870f5141c97dc7d85a079edb4bd8d88c3ad5b20c7", + "sha256:dec5202bfe6f672d4511086e125db035a52b00f1648d6407cc8e526912c0353a", + "sha256:e1ea316102ea1e1770724db01998d1603ed921c54a86a2efcb03428d5417e489", + "sha256:f90bfc4ad18450c80b024036eaf91e4a246ae287701aaa88eaebebf150868052" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==5.1" + }, + "idna": { + "hashes": [ + "sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb", + "sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.9" + }, + "more-itertools": { + "hashes": [ + "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5", + "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2" + ], + "markers": "python_version >= '3.5'", + "version": "==8.4.0" + }, + "packaging": { + "hashes": [ + "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8", + "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.4" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + 
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:a673fa23d7000440cc885c17dbd34fafcb7d7a6e230b29f6766400de36a33c44", + "sha256:f3b3a4c36512a4c4f024041ab51866f11761cc169670204b235f6b20523d4e6b" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.8.2" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" + }, + "pytest": { + "hashes": [ + "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1", + "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8" + ], + "index": "pypi", + "version": "==5.4.3" + }, + "pytest-asyncio": { + "hashes": [ + "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2" + ], + "index": "pypi", + "version": "==0.12.0" + }, + "pytest-cov": { + "hashes": [ + "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87", + "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c" + ], + "index": "pypi", + "version": "==2.10.0" + }, + "requests": { + "hashes": [ + "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b", + "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898" + ], + "version": "==2.24.0" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.15.0" + }, + "urllib3": { + "hashes": [ + "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527", + "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.25.9" + }, + "wcwidth": { + "hashes": [ + "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", + "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" + ], + "version": "==0.2.5" + } + } +} diff --git a/README.md b/README.md new file mode 100644 index 000000000..54915f2de --- /dev/null +++ b/README.md @@ -0,0 +1,30 @@ +# arturo-stac-api ![arturo-stac-api](https://github.com/arturo-ai/arturo-stac-api/workflows/arturo-stac-api/badge.svg) +FastAPI/postgres implementation of the [STAC API specification](https://github.com/radiantearth/stac-api-spec). + +## Project Structure +``` +. 
+├── alembic # Database migrations +│   └── versions # Migration versions +├── stac_api +│   ├── clients # Database CRUD +│   ├── models # Database and API data models +│   ├── resources # API endpoints +│   └── utils # FastAPI dependencies +└── tests + ├── clients # CRUD test cases + ├── data # Test data + └── resources # API test cases +``` + +## Local Development +Use docker-compose to deploy the application, migrate the database, and ingest an example collection: +``` +docker-compose build +docker-compose up +``` + +Run tests (the `docker-compose` stack must be running): +``` +make test +``` \ No newline at end of file diff --git a/VERSION b/VERSION new file mode 100644 index 000000000..6c6aa7cb0 --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.1.0 \ No newline at end of file diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 000000000..7dec63538 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,85 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +;sqlalchemy.url = postgresql://alex:password@localhost:5432/postgres + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks=black +# black.type=console_scripts +# black.entrypoint=black +# black.options=-l 79 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 000000000..ccdf8ed45 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,92 @@ +import os +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def get_connection_url() -> str: + """ + Get connection URL from environment variables (see `.env.example`) + """ + postgres_user = os.environ['POSTGRES_USER'] + postgres_pass = os.environ['POSTGRES_PASS'] + postgres_host = os.environ['POSTGRES_HOST'] + postgres_port = os.environ['POSTGRES_PORT'] + postgres_dbname = os.environ['POSTGRES_DBNAME'] + return f"postgresql://{postgres_user}:{postgres_pass}@{postgres_host}:{postgres_port}/{postgres_dbname}" + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = get_connection_url() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + configuration = config.get_section(config.config_ini_section) + configuration['sqlalchemy.url'] = get_connection_url() + connectable = engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/131aab4d9e49_create_tables.py b/alembic/versions/131aab4d9e49_create_tables.py new file mode 100644 index 000000000..e0678d30a --- /dev/null +++ b/alembic/versions/131aab4d9e49_create_tables.py @@ -0,0 +1,72 @@ +"""create initial schema + +Revision ID: 131aab4d9e49 +Revises: +Create Date: 2020-02-09 13:03:09.336631 + +""" +from alembic import op +import sqlalchemy as sa + +from sqlalchemy.dialects.postgresql import JSONB +from geoalchemy2.types import Geometry + + +# revision identifiers, used by Alembic. +revision = '131aab4d9e49' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("CREATE SCHEMA data") + op.execute("CREATE EXTENSION IF NOT EXISTS postgis") + + # Create collections table + op.create_table( + "collections", + sa.Column("id", sa.VARCHAR(1024), nullable=False, primary_key=True), + sa.Column("stac_version", sa.VARCHAR(300)), + sa.Column("title", sa.VARCHAR(1024)), + sa.Column("description", sa.VARCHAR(1024), nullable=False), + sa.Column("keywords", sa.ARRAY(sa.VARCHAR(300))), + sa.Column("version", sa.VARCHAR(300)), + sa.Column("license", sa.VARCHAR(300), nullable=False), + sa.Column("providers", JSONB), + sa.Column("extent", JSONB), + sa.Column("links", JSONB, nullable=True), + schema="data" + ) + + # Create items table + op.create_table( + "items", + sa.Column("id", sa.VARCHAR(1024), nullable=False, primary_key=True), + sa.Column("geometry", Geometry("POLYGON", srid=4326, spatial_index=True)), + sa.Column("bbox", sa.ARRAY(sa.NUMERIC), nullable=False), + sa.Column("properties", JSONB), + sa.Column("assets", JSONB), + sa.Column("collection_id", sa.VARCHAR(1024), nullable=False, index=True), + # These are usually in properties but defined as their own fields for indexing + sa.Column("datetime", sa.TIMESTAMP, nullable=False, index=True), + sa.Column("links", JSONB, nullable=True), + sa.ForeignKeyConstraint(["collection_id"], ["data.collections.id"]), + schema="data" + ) + + # Create pagination token table + op.create_table( + "tokens", + sa.Column("id", sa.VARCHAR(100), nullable=False, primary_key=True), + sa.Column("keyset", sa.VARCHAR(1000), nullable=False), + schema="data" + ) + + +def downgrade(): + op.execute("DROP TABLE data.items") + op.execute("DROP TABLE data.collections") + op.execute("DROP TABLE data.paging_tokens") + op.execute("DROP SCHEMA data") + op.execute("DROP EXTENSION IF EXISTS postgis") diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..7910aca76 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,57 @@ +version: '3' + +services: + app: + container_name: stac-api + build: + context: . 
+ dockerfile: Dockerfile + environment: + - APP_HOST=0.0.0.0 + - APP_PORT=8081 + - RELOAD=true + - ENVIRONMENT=local + - POSTGRES_USER=username + - POSTGRES_PASS=password + - POSTGRES_DBNAME=postgis + - POSTGRES_HOST_READER=host.docker.internal + - POSTGRES_HOST_WRITER=host.docker.internal + - POSTGRES_PORT=5432 + ports: + - "8081:8081" + volumes: + - ./:/app + depends_on: + - database + + database: + container_name: stac-db + image: kartoza/postgis:latest + environment: + - POSTGRES_USER=username + - POSTGRES_PASS=password + - POSTGRES_DBNAME=postgis + - POSTGRES_HOST=localhost + - POSTGRES_PORT=5432 + - ALLOW_IP_RANGE=0.0.0.0/0 + ports: + - "5432:5432" + + migration: +# image: stac-api:latest + build: + context: . + dockerfile: Dockerfile + environment: + - ENVIRONMENT=development + - POSTGRES_USER=username + - POSTGRES_PASS=password + - POSTGRES_DBNAME=postgis + - POSTGRES_HOST=host.docker.internal + - POSTGRES_PORT=5432 + command: > + bash -c "git clone https://github.com/vishnubob/wait-for-it.git && + ./wait-for-it/wait-for-it.sh localhost:5432 -- alembic upgrade head && + python scripts/ingest_joplin.py" + depends_on: + - database \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..d8c8c5647 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +testpaths = tests +addopts = -sv --cov=stac_api --cov-fail-under=85 --cov-report=term-missing \ No newline at end of file diff --git a/scripts/ingest_joplin.py b/scripts/ingest_joplin.py new file mode 100644 index 000000000..bccfdd1fd --- /dev/null +++ b/scripts/ingest_joplin.py @@ -0,0 +1,27 @@ +from urllib.parse import urljoin + +import requests + + +bucket = "arturo-stac-api-test-data" +app_host = "http://host.docker.internal:8081" + + +def ingest_joplin_data(): + r = requests.get(f"https://{bucket}.s3.amazonaws.com/joplin/collection.json") + collection = r.json() + + r = requests.post(urljoin(app_host, "/collections"), json=collection) + r.raise_for_status() + + r = requests.get(f"https://{bucket}.s3.amazonaws.com/joplin/index.geojson") + index = r.json() + for feat in index["features"]: + r = requests.post( + urljoin(app_host, f"/collections/{collection['id']}/items"), json=feat + ) + r.raise_for_status() + + +if __name__ == "__main__": + ingest_joplin_data() diff --git a/stac_api/__init__.py b/stac_api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/stac_api/app.py b/stac_api/app.py new file mode 100644 index 000000000..ebd940a5f --- /dev/null +++ b/stac_api/app.py @@ -0,0 +1,35 @@ +from fastapi import FastAPI +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from . 
import settings +from .resources import mgmt, collection, conformance, item +from .utils import dependencies + + +app = FastAPI() +app.debug = settings.DEBUG +app.include_router(mgmt.router) +app.include_router(conformance.router) +app.include_router(collection.router) +app.include_router(item.router) + + +@app.on_event("startup") +async def on_startup(): + """Create database engines and sessions on startup""" + dependencies.ENGINE_READER = create_engine(settings.SQLALCHEMY_DATABASE_READER) + dependencies.ENGINE_WRITER = create_engine(settings.SQLALCHEMY_DATABASE_WRITER) + dependencies.DB_READER = sessionmaker( + autocommit=False, autoflush=False, bind=dependencies.ENGINE_READER + ) + dependencies.DB_WRITER = sessionmaker( + autocommit=False, autoflush=False, bind=dependencies.ENGINE_WRITER + ) + + +@app.on_event("shutdown") +async def on_shutdown(): + """Dispose of database engines and sessions on app shutdown""" + dependencies.ENGINE_READER.dispose() + dependencies.ENGINE_WRITER.dispose() diff --git a/stac_api/clients/__init__.py b/stac_api/clients/__init__.py new file mode 100644 index 000000000..e421f5576 --- /dev/null +++ b/stac_api/clients/__init__.py @@ -0,0 +1,2 @@ +from .collection_crud import collection_crud_client_factory +from .item_crud import item_crud_client_factory diff --git a/stac_api/clients/base_crud.py b/stac_api/clients/base_crud.py new file mode 100644 index 000000000..9c1170411 --- /dev/null +++ b/stac_api/clients/base_crud.py @@ -0,0 +1,106 @@ +from dataclasses import dataclass +import logging +from typing import Union + +import psycopg2 +import sqlalchemy as sa +from sqlalchemy.orm import Query + +from .. import errors +from ..models import database, schemas + +logger = logging.getLogger(__name__) + + +@dataclass +class BaseCrudClient: + """Database CRUD operations on the defined table""" + + reader_session: sa.orm.Session + writer_session: sa.orm.Session + table: database.BaseModel + + @staticmethod + def row_exists(query: Query) -> bool: + """Check if a record exists from the sqlalchemy query object""" + return True if query.scalar() else False + + def commit(self) -> None: + """Commit both reader and writer sessions to keep them in sync, rolling back on psycopg2 errors""" + try: + self.reader_session.commit() + self.writer_session.commit() + except sa.exc.IntegrityError as e: + self.reader_session.rollback() + self.writer_session.rollback() + logger.error(e.orig.pgerror, exc_info=True) + # Explicitly catch foreign key errors to be reraised by the API as validation errors + if isinstance(e.orig, psycopg2.errors.ForeignKeyViolation): + raise errors.ForeignKeyError(message=e.orig.pgerror) + raise errors.DatabaseError(message=e.orig.pgerror) from e + except: + error_message = "Unhandled database exception during commit" + logger.error(error_message, exc_info=True) + raise errors.DatabaseError(message=error_message) + + def lookup_id(self, item_id: str) -> Query: + """Create a query to access a single record from the table""" + try: + query = self.reader_session.query(self.table).filter( + self.table.id == item_id + ) + except: + error_message = f"Unhandled database during ID lookup" + logger.error(error_message, exc_info=True) + raise errors.DatabaseError(message=error_message) + if not self.row_exists(query): + error_message = f"Row {item_id} does not exist" + logger.warning(error_message) + raise errors.NotFoundError(message=error_message) + return query + + def create( + self, item: Union[schemas.Collection, schemas.Item] + ) -> Union[database.Collection, 
database.Item]: + """Create a single record for the table""" + try: + self.lookup_id(item.id) + error_message = f"Row {item.id} already exists" + logger.error(error_message, exc_info=True) + raise errors.ConflictError(message=error_message) + except errors.NotFoundError: + row_data = self.table.from_schema(item) + self.writer_session.add(row_data) + self.commit() + return row_data + + def read(self, item_id: str) -> Union[database.Collection, database.Item]: + """Read a single record from the table""" + row_data = self.lookup_id(item_id).first() + return row_data + + def update( + self, item: Union[schemas.Collection, schemas.Item] + ) -> Union[database.Collection, database.Item]: + """Create a single record if it does not exist or update an existing record""" + try: + query = self.lookup_id(item.id) + update_data = self.table.get_database_model(item) + # SQLAlchemy orm updates don't seem to like geoalchemy types + update_data.pop("geometry", None) + query.update(update_data) + self.commit() + return self.table.from_schema(item) + except errors.NotFoundError: + row_data = self.table.from_schema(item) + self.writer_session.add(row_data) + self.commit() + return row_data + + def delete(self, item_id: str) -> Union[database.Collection, database.Item]: + """Delete a single record from the table""" + query = self.lookup_id(item_id) + row_data = query.first() + query.delete() + self.commit() + return row_data diff --git a/stac_api/clients/collection_crud.py b/stac_api/clients/collection_crud.py new file mode 100644 index 000000000..fa1456083 --- /dev/null +++ b/stac_api/clients/collection_crud.py @@ -0,0 +1,75 @@ +from dataclasses import dataclass +import logging +from typing import List, Tuple + +from fastapi import Depends +from sqlalchemy.orm import Session +from sqlakeyset import get_page, Page + +from .base_crud import BaseCrudClient +from .tokens import PaginationTokenClient, pagination_token_client_factory +from .. 
import errors +from ..models import database +from ..utils.dependencies import database_reader_factory, database_writer_factory + +logger = logging.getLogger(__name__) + + +@dataclass +class CollectionCrudClient(BaseCrudClient): + pagination_client: PaginationTokenClient + + def get_all_collections(self) -> List[database.Collection]: + """Read all collections from the database""" + try: + items = self.reader_session.query(self.table).all() + except: + error_message = "Unhandled database error when getting item collection" + logger.error(error_message, exc_info=True) + raise errors.DatabaseError(message=error_message) + return items + + def get_item_collection( + self, collection_id: str, limit: int, token: str = None + ) -> Tuple[Page, int]: + """Read an item collection from the database""" + try: + collection_children = ( + self.lookup_id(collection_id) + .first() + .children.order_by(database.Item.datetime.desc(), database.Item.id) + ) + count = collection_children.count() + token = self.pagination_client.get(token) if token else token + page = get_page(collection_children, per_page=limit, page=(token or False)) + # Create dynamic attributes for each page + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.pagination_client.insert(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + except errors.NotFoundError: + raise + except: + error_message = "Unhandled database error when getting collection children" + logger.error(error_message, exc_info=True) + raise errors.DatabaseError(message=error_message) + return page, count + + +def collection_crud_client_factory( + reader_session: Session = Depends(database_reader_factory), + writer_session: Session = Depends(database_writer_factory), + pagination_client: PaginationTokenClient = Depends(pagination_token_client_factory), +) -> CollectionCrudClient: + return CollectionCrudClient( + reader_session=reader_session, + writer_session=writer_session, + table=database.Collection, + pagination_client=pagination_client, + ) diff --git a/stac_api/clients/item_crud.py b/stac_api/clients/item_crud.py new file mode 100644 index 000000000..385d8dac4 --- /dev/null +++ b/stac_api/clients/item_crud.py @@ -0,0 +1,145 @@ +from dataclasses import dataclass +import logging +from typing import List, Tuple, Union + +from fastapi import Depends +import geoalchemy2 as ga +import sqlalchemy as sa +from sqlalchemy.orm import Session +from sqlakeyset import get_page, Page + +from .base_crud import BaseCrudClient +from .collection_crud import CollectionCrudClient, collection_crud_client_factory +from .tokens import PaginationTokenClient, pagination_token_client_factory +from ..errors import DatabaseError +from ..models import database, schemas +from ..utils.dependencies import database_writer_factory, database_reader_factory + +logger = logging.getLogger(__name__) + +NumType = Union[float, int] + + +@dataclass +class ItemCrudClient(BaseCrudClient): + collection_crud: CollectionCrudClient + pagination_client: PaginationTokenClient + + def stac_search(self, search_request: schemas.STACSearch) -> Tuple[Page, int]: + token = ( + self.pagination_client.get(search_request.token) + if search_request.token + else False + ) + query = self.reader_session.query(self.table) + + # Filter by collection + if search_request.collections: + collection_filter = sa.or_( + *[ + self.table.collection_id == col_id + for col_id in search_request.collections + ] + 
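+                # Note: an OR of equality comparisons is equivalent to an IN filter;
+                # SQLAlchemy's column-level ``in_()`` could express the same condition.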
) + query = query.filter(collection_filter) + + # Sort + if search_request.sortby: + sort_fields = [ + getattr(self.table.get_field(sort.field), sort.direction.value)() + for sort in search_request.sortby + ] + # Add id to end of sort to ensure unique keyset + sort_fields.append(self.table.id) + query = query.order_by(*sort_fields) + else: + # Default sort is date and id + query = query.order_by(self.table.datetime.desc(), self.table.id) + + # Ignore other parameters if ID is present + if search_request.ids: + id_filter = sa.or_(*[self.table.id == i for i in search_request.ids]) + try: + items = query.filter(id_filter).order_by(self.table.id) + page = get_page(items, per_page=search_request.limit, page=token) + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.pagination_client.insert(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + except: + error_message = ( + "Unhandled database error when searching for items by id" + ) + logger.error(error_message, exc_info=True) + raise DatabaseError(message=error_message) + return page, len(search_request.ids) + + # Spatial query + poly = search_request.polygon() + if poly: + filter_geom = ga.shape.from_shape(poly, srid=4326) + query = query.filter( + ga.func.ST_Intersects(self.table.geometry, filter_geom) + ) + + # Temporal query + if search_request.datetime: + # Two tailed query (between) + if ".." not in search_request.datetime: + query = query.filter( + self.table.datetime.between(*search_request.datetime) + ) + # All items after the start date + if search_request.datetime[0] != "..": + query = query.filter(self.table.datetime >= search_request.datetime[0]) + # All items before the end date + if search_request.datetime[1] != "..": + query = query.filter(self.table.datetime <= search_request.datetime[1]) + + # Query fields + if search_request.query: + for (field_name, expr) in search_request.query.items(): + field = self.table.get_field(field_name) + for (op, value) in expr.items(): + query = query.filter(op.operator(field, value)) + + try: + count = query.count() + page = get_page(query, per_page=search_request.limit, page=token) + # Create dynamic attributes for each page + page.next = ( + self.pagination_client.insert(keyset=page.paging.bookmark_next) + if page.paging.has_next + else None + ) + page.previous = ( + self.pagination_client.insert(keyset=page.paging.bookmark_previous) + if page.paging.has_previous + else None + ) + except: + error_message = "Unhandled database error during spatial/temporal query" + logger.error(error_message, exc_info=True) + raise DatabaseError(message=error_message) + return page, count + + +def item_crud_client_factory( + reader_session: Session = Depends(database_reader_factory), + writer_session: Session = Depends(database_writer_factory), + collection_crud: CollectionCrudClient = Depends(collection_crud_client_factory), + pagination_client: PaginationTokenClient = Depends(pagination_token_client_factory), +) -> ItemCrudClient: + return ItemCrudClient( + reader_session=reader_session, + writer_session=writer_session, + collection_crud=collection_crud, + table=database.Item, + pagination_client=pagination_client, + ) diff --git a/stac_api/clients/tokens.py b/stac_api/clients/tokens.py new file mode 100644 index 000000000..45481488a --- /dev/null +++ b/stac_api/clients/tokens.py @@ -0,0 +1,47 @@ +from base64 import urlsafe_b64encode +from dataclasses import dataclass +import os 
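+# Keyset pagination support: sqlakeyset produces an opaque "bookmark" string for
+# each page boundary. Rather than exposing that bookmark directly, the client
+# below stores it under a short random id and hands the id back to API callers
+# as the ``token`` parameter (collection_crud and item_crud store
+# ``page.paging.bookmark_next`` / ``bookmark_previous`` this way and resolve
+# incoming tokens on the next request).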
+ +from fastapi import Depends +from sqlalchemy.orm import Session + +from .base_crud import BaseCrudClient +from ..models import database + +from ..errors import DatabaseError +from ..utils.dependencies import database_reader_factory, database_writer_factory + + +@dataclass +class PaginationTokenClient(BaseCrudClient): + def insert(self, keyset: str, tries: int = 0) -> str: + """Insert a keyset into the database""" + # uid has collision chance of 1e-7 percent + uid = urlsafe_b64encode(os.urandom(6)).decode() + try: + token = database.PaginationToken(id=uid, keyset=keyset) + self.writer_session.add(token) + self.commit() + return token.id + except DatabaseError: + # Try again if uid already exists in the database + # TODO: Explicitely check for ConflictError (if insert fails for other reasons it should be raised) + self.insert(keyset, tries=tries + 1) + if tries > 5: + raise + + def get(self, token_id: str) -> str: + """Retrieve a keyset from the database""" + row = self.lookup_id(token_id).first() + return row.keyset + + +def pagination_token_client_factory( + reader_session: Session = Depends(database_reader_factory), + writer_session: Session = Depends(database_writer_factory), +) -> PaginationTokenClient: + return PaginationTokenClient( + reader_session=reader_session, + writer_session=writer_session, + table=database.PaginationToken, + ) diff --git a/stac_api/errors.py b/stac_api/errors.py new file mode 100644 index 000000000..c56074d8c --- /dev/null +++ b/stac_api/errors.py @@ -0,0 +1,21 @@ +from dataclasses import dataclass + + +@dataclass +class ConflictError(Exception): + message: str + + +@dataclass +class NotFoundError(Exception): + message: str + + +@dataclass +class ForeignKeyError(Exception): + message: str + + +@dataclass +class DatabaseError(Exception): + message: str diff --git a/stac_api/models/__init__.py b/stac_api/models/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/stac_api/models/database.py b/stac_api/models/database.py new file mode 100644 index 000000000..303dc963f --- /dev/null +++ b/stac_api/models/database.py @@ -0,0 +1,134 @@ +from datetime import datetime +import json +from typing import Optional + +import geoalchemy2 as ga +from shapely.geometry import shape +import sqlalchemy as sa +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.dialects.postgresql import JSONB +from stac_pydantic.shared import DATETIME_RFC339 + +from . import schemas +from .. 
import settings + + +BaseModel = declarative_base() + + +class GeojsonGeometry(ga.Geometry): + """ + Custom geoalchemy type which returns GeoJSON + """ + + from_text = "ST_GeomFromGeoJSON" + + def result_processor(self, dialect: str, coltype): + """Override default processer to return GeoJSON""" + + def process(value: Optional[bytes]): + if value is not None: + geom = ga.shape.to_shape( + ga.elements.WKBElement( + value, srid=self.srid, extended=self.extended + ) + ) + return json.loads(json.dumps(geom.__geo_interface__)) + + return process + + +class Collection(BaseModel): + __tablename__ = "collections" + __table_args__ = {"schema": "data"} + + id = sa.Column(sa.VARCHAR(1024), nullable=False, primary_key=True) + stac_version = sa.Column(sa.VARCHAR(300)) + title = sa.Column(sa.VARCHAR(1024)) + description = sa.Column(sa.VARCHAR(1024), nullable=False) + keywords = sa.Column(sa.VARCHAR(300)) + version = sa.Column(sa.VARCHAR(300)) + license = sa.Column(sa.VARCHAR(300), nullable=False) + providers = sa.Column(JSONB) + extent = sa.Column(JSONB) + links = sa.Column(JSONB) + children = sa.orm.relationship("Item", lazy="dynamic") + + @classmethod + def get_database_model(cls, schema: schemas.Collection) -> dict: + return schema.dict(exclude_none=True) + + @classmethod + def from_schema(cls, schema: schemas.Collection) -> "Collection": + return cls(**cls.get_database_model(schema)) + + +class Item(BaseModel): + __tablename__ = "items" + __table_args__ = {"schema": "data"} + + id = sa.Column(sa.VARCHAR(1024), nullable=False, primary_key=True) + geometry = sa.Column(GeojsonGeometry("POLYGON", srid=4326, spatial_index=True)) + bbox = sa.Column(sa.ARRAY(sa.NUMERIC), nullable=False) + properties = sa.Column(JSONB) + assets = sa.Column(JSONB) + collection_id = sa.Column( + sa.VARCHAR(1024), sa.ForeignKey(Collection.id), nullable=False + ) + parent_collection = sa.orm.relationship("Collection", back_populates="children") + datetime = sa.Column(sa.TIMESTAMP, nullable=False) + links = sa.Column(JSONB) + + @classmethod + def get_database_model(cls, schema: schemas.Item) -> dict: + """Decompose pydantic model to data model""" + indexed_fields = {} + for field in settings.INDEXED_FIELDS: + # Use getattr to accommodate extension namespaces + field_value = getattr(schema.properties, field) + if field == "datetime": + field_value = datetime.strptime(field_value, DATETIME_RFC339) + indexed_fields[field.split(":")[-1]] = field_value + + # Exclude indexed fields from the properties jsonb field + properties = schema.properties.dict(exclude=set(settings.INDEXED_FIELDS)) + now = datetime.utcnow().strftime(DATETIME_RFC339) + if "created" not in properties: + properties["created"] = now + properties["updated"] = now + + return dict( + collection_id=schema.collection, + geometry=ga.shape.from_shape(shape(schema.geometry), 4326), + properties=properties, + **indexed_fields, + **schema.dict( + exclude_none=True, + exclude=set( + settings.FORBIDDEN_FIELDS | {"geometry", "properties", "collection"} + ), + ) + ) + + @classmethod + def from_schema(cls, schema: schemas.Item) -> "Item": + return cls(**cls.get_database_model(schema)) + + @classmethod + def get_field(cls, field_name): + """Get a model field""" + try: + return getattr(cls, field_name) + except AttributeError: + # Use a JSONB field + return cls.properties[(field_name)].cast( + getattr(schemas.QueryableTypes, field_name.name) + ) + + +class PaginationToken(BaseModel): + __tablename__ = "tokens" + __table_args__ = {"schema": "data"} + + id = sa.Column(sa.VARCHAR(100), 
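+        # Populated by PaginationTokenClient.insert with the URL-safe base64 of six
+        # random bytes (8 characters), so VARCHAR(100) leaves ample headroom.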
nullable=False, primary_key=True) + keyset = sa.Column(sa.VARCHAR(1000), nullable=False) diff --git a/stac_api/models/decompose.py b/stac_api/models/decompose.py new file mode 100644 index 000000000..9d743a031 --- /dev/null +++ b/stac_api/models/decompose.py @@ -0,0 +1,82 @@ +import json +from typing import Any, Dict, List, Union +from urllib.parse import urljoin + +import geoalchemy2 as ga +from pydantic.utils import GetterDict +from stac_pydantic.item import ItemProperties +from stac_pydantic.shared import DATETIME_RFC339 + +from .links import CollectionLinks, ItemLinks, filter_links +from ..errors import DatabaseError +from ..settings import INDEXED_FIELDS + + +def resolve_links(links: list, base_url: str) -> List[Dict]: + """ + Convert relative links to absolute links using the specified base url. It would be more appropriate to use a view, + but SQLAlchemy ORM doesn't support this as far as I know. + """ + filtered_links = filter_links(links) + for link in filtered_links: + link.update({"href": urljoin(base_url, link["href"])}) + return filtered_links + + +class ItemGetter(GetterDict): + """ + Custom GetterDict used internally by pydantic ORM mode when decomposing database model to pydantic model. This + object resolves structural differences between the two models, for example: + - relative links stored in the database must be resolved absolute links and inferred links must be added + - ``datetime`` is defined as its own field in the database but as ``item.properties.datetime`` in the stac spec + - ``geometry`` can be one of several formats when exported from the database but the STAC item expects geojson + """ + + @staticmethod + def decode_geom(geom: Union[ga.elements.WKBElement, str, Dict]) -> Dict: + if isinstance(geom, ga.elements.WKBElement): + return json.loads(json.dumps(ga.shape.to_shape(geom).__geo_interface__)) + elif isinstance(geom, str): + return json.loads(geom) + elif isinstance(geom, dict): + return geom + raise DatabaseError("Received unexpected geometry format from database") + + def __init__(self, obj: Any): + properties = {} + for field in INDEXED_FIELDS: + # Use getattr to accommodate extension namespaces + field_value = getattr(obj, field.split(":")[-1]) + if field == "datetime": + field_value = field_value.strftime(DATETIME_RFC339) + properties[field] = field_value + obj.properties.update(ItemProperties(**properties)) + # Create inferred links + item_links = ItemLinks( + collection_id=obj.collection_id, base_url=obj.base_url, item_id=obj.id + ).create_links() + # Resolve existing links + if obj.links: + item_links += resolve_links(obj.links, obj.base_url) + obj.type = "Feature" + obj.links = item_links + obj.geometry = self.decode_geom(obj.geometry) + obj.collection = obj.collection_id + super().__init__(obj) + + +class CollectionGetter(GetterDict): + """ + Custom GetterDict used internally by pydantic ORM mode when collection ORM model to pydantic model + """ + + def __init__(self, obj: Any): + # Create inferred links + collection_links = CollectionLinks( + collection_id=obj.id, base_url=obj.base_url + ).create_links() + # Resolve existing links + if obj.links: + collection_links += resolve_links(obj.links, obj.base_url) + obj.links = collection_links + super().__init__(obj) diff --git a/stac_api/models/links.py b/stac_api/models/links.py new file mode 100644 index 000000000..97170f82b --- /dev/null +++ b/stac_api/models/links.py @@ -0,0 +1,93 @@ +from dataclasses import dataclass +from typing import List, Dict +from urllib.parse import urljoin + +from 
stac_pydantic.shared import Link, MimeTypes, Relations + + +# These can be inferred from the item/collection so they aren't included in the database +# Instead they are dynamically generated when querying the database using the classes defined below +INFERRED_LINK_RELS = ["self", "item", "parent", "collection", "root"] + + +def filter_links(links: List[Dict]) -> List[Dict]: + """Remove inferred links""" + return [l for l in links if l["rel"] not in INFERRED_LINK_RELS] + + +@dataclass +class BaseLinks: + """Create inferred links common to collections and items""" + + collection_id: str + base_url: str + + def root(self) -> Link: + return Link( + rel=Relations.root, type=MimeTypes.json, href=urljoin(self.base_url, "/") + ) + + +@dataclass +class CollectionLinks(BaseLinks): + """Create inferred links specific to collections""" + + def self(self) -> Link: + return Link( + rel=Relations.self, + type=MimeTypes.json, + href=urljoin(self.base_url, f"/collections/{self.collection_id}"), + ) + + def parent(self) -> Link: + return Link( + rel=Relations.parent, type=MimeTypes.json, href=urljoin(self.base_url, "/") + ) + + def item(self) -> Link: + return Link( + rel=Relations.item, + type=MimeTypes.geojson, + href=urljoin(self.base_url, f"/collections/{self.collection_id}/items"), + ) + + def create_links(self) -> List[Link]: + return [self.self(), self.parent(), self.item(), self.root()] + + +@dataclass +class ItemLinks(BaseLinks): + """Create inferred links specific to items""" + + item_id: str + + def self(self) -> Link: + return Link( + rel=Relations.self, + type=MimeTypes.geojson, + href=urljoin( + self.base_url, f"/collections/{self.collection_id}/items/{self.item_id}" + ), + ) + + def parent(self) -> Link: + return Link( + rel=Relations.parent, + type=MimeTypes.json, + href=urljoin(self.base_url, f"/collections/{self.collection_id}"), + ) + + def collection(self) -> Link: + return Link( + rel=Relations.collection, + type=MimeTypes.json, + href=urljoin(self.base_url, f"/collections/{self.collection_id}"), + ) + + def create_links(self) -> List[Link]: + return [ + self.self(), + self.parent(), + self.collection(), + self.root(), + ] diff --git a/stac_api/models/schemas.py b/stac_api/models/schemas.py new file mode 100644 index 000000000..6dbc3035b --- /dev/null +++ b/stac_api/models/schemas.py @@ -0,0 +1,194 @@ +from dataclasses import dataclass +from enum import auto +from datetime import datetime +import operator +from types import DynamicClassAttribute +from typing import Any, Callable, Dict, List, Optional, Set, Union + +from geojson_pydantic.geometries import Polygon +from pydantic import root_validator, Field +from shapely.geometry import Polygon as ShapelyPolygon, shape +import sqlalchemy as sa +from stac_pydantic import ( + Collection as CollectionBase, + Item as ItemBase, +) +from stac_pydantic.shared import Link +from stac_pydantic.utils import AutoValueEnum +from stac_pydantic.api import Search +from stac_pydantic.api.search import DATETIME_RFC339 +from stac_pydantic.api.extensions.fields import FieldsExtension as FieldsBase + +from .decompose import CollectionGetter, ItemGetter +from .. import settings + +# Be careful: https://github.com/samuelcolvin/pydantic/issues/1423#issuecomment-642797287 +NumType = Union[float, int] + + +class Operator(str, AutoValueEnum): + """ + Define our own operators because all operators defined in stac-pydantic are not currently supported. 
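+    For example, a search body of ``{"query": {"gsd": {"eq": 30}}}`` keeps items
+    whose ``gsd`` property equals 30; each operator name resolves to the function
+    of the same name in Python's ``operator`` module.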
+ """ + + eq = auto() + ne = auto() + lt = auto() + le = auto() + gt = auto() + ge = auto() + # TODO: These are defined in the spec but aren't currently implemented by the api + # startsWith = auto() + # endsWith = auto() + # contains = auto() + # in = auto() + + @DynamicClassAttribute + def operator(self) -> Callable[[Any, Any], bool]: + """Return python operator""" + return getattr(operator, self._value_) + + +class Queryables(str, AutoValueEnum): + """ + Define an enum of queryable fields and their data type. Queryable fields are explicitly defined for two reasons: + 1. So the caller knows which fields they can query by + 2. Because JSONB queries with sqlalchemy ORM require casting the type of the field at runtime + (see ``QueryableTypes``) + + # TODO: Let the user define these in a config file + """ + + orientation = auto() + gsd = auto() + epsg = "proj:epsg" + height = auto() + width = auto() + minzoom = "cog:minzoom" + maxzoom = "cog:maxzoom" + dtype = "cog:dtype" + + +@dataclass +class QueryableTypes: + """ + Define an enum of the field type of each queryable field + + # TODO: Let the user define these in a config file + # TODO: There is a much better way of defining this field <> type mapping than two enums with same keys + """ + + orientation = sa.String + gsd = sa.Float + epsg = sa.Integer + height = sa.Integer + width = sa.Integer + minzoom = sa.Integer + maxzoom = sa.Integer + dtype = sa.String + + +class FieldsExtension(FieldsBase): + include: Optional[Set[str]] = set() + exclude: Optional[Set[str]] = set() + + def _get_field_dict(self, fields: Set[str]) -> Dict: + """ + Internal method to reate a dictionary for advanced include or exclude of pydantic fields on model export + + Ref: https://pydantic-docs.helpmanual.io/usage/exporting_models/#advanced-include-and-exclude + """ + field_dict = {} + for field in fields: + if "." in field: + parent, key = field.split(".") + if parent not in field_dict: + field_dict[parent] = {key} + else: + field_dict[parent].add(key) + else: + field_dict[field] = ... 
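+        # For example (illustrative): {"id", "properties.datetime"} becomes
+        # {"id": ..., "properties": {"datetime"}}, the nested structure pydantic
+        # expects for advanced include/exclude on model export.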
+ return field_dict + + @property + def filter_fields(self) -> Dict: + """ + Create dictionary of fields to include/exclude on model export based on the included and excluded fields passed + to the API + + Ref: https://pydantic-docs.helpmanual.io/usage/exporting_models/#advanced-include-and-exclude + """ + # Include default set of fields + include = settings.DEFAULT_INCLUDES + # If only include is specified, add fields to default set + if self.include and not self.exclude: + include = include.union(self.include) + # If both include + exclude specified, find the difference between sets but don't remove any default fields + # If we remove default fields we will get a validation error + elif self.include and self.exclude: + include = include.union(self.include) - ( + self.exclude - settings.DEFAULT_INCLUDES + ) + return { + "include": self._get_field_dict(include), + "exclude": self._get_field_dict(self.exclude - settings.DEFAULT_INCLUDES), + } + + +class Collection(CollectionBase): + links: Optional[List[Link]] + + class Config: + orm_mode = True + use_enum_values = True + getter_dict = CollectionGetter + + +class Item(ItemBase): + geometry: Polygon + links: Optional[List[Link]] + + class Config: + json_encoders = {datetime: lambda v: v.strftime(DATETIME_RFC339)} + use_enum_values = True + orm_mode = True + getter_dict = ItemGetter + + +class STACSearch(Search): + # Make collections optional, default to searching all collections if none are provided + collections: Optional[List[str]] = None + # Override default field extension to include default fields and pydantic includes/excludes factory + field: FieldsExtension = Field(FieldsExtension(), alias="fields") + # Override query extension with supported operators + query: Optional[Dict[Queryables, Dict[Operator, Any]]] + token: Optional[str] = None + + @root_validator + def include_query_fields(cls, values: Dict) -> Dict: + """ + Root validator to ensure query fields are included in the API response + """ + if values["query"]: + query_include = set( + [ + k.value if k in settings.INDEXED_FIELDS else f"properties.{k.value}" + for k in values["query"] + ] + ) + if not values["field"].include: + values["field"].include = query_include + else: + values["field"].include.union(query_include) + return values + + def polygon(self) -> Optional[ShapelyPolygon]: + """ + Convenience method to create a shapely polygon for the spatial query (either `intersects` or `bbox`) + """ + if self.intersects: + return shape(self.intersects) + elif self.bbox: + return ShapelyPolygon.from_bounds(*self.bbox) + else: + return None diff --git a/stac_api/resources/__init__.py b/stac_api/resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/stac_api/resources/collection.py b/stac_api/resources/collection.py new file mode 100644 index 000000000..5b5a17cc6 --- /dev/null +++ b/stac_api/resources/collection.py @@ -0,0 +1,152 @@ +from typing import List + +from fastapi import APIRouter, Depends +from starlette import status +from starlette.exceptions import HTTPException + +from .. 
import errors +from ..clients import collection_crud_client_factory +from ..clients.collection_crud import CollectionCrudClient +from ..models import schemas +from ..utils.dependencies import discover_base_url + + +router = APIRouter() + + +@router.post( + "/collections", + summary="Create a new collection", + response_model=schemas.Collection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def create_collection( + collection: schemas.Collection, + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.create(collection) + except errors.ConflictError as e: + raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.put( + "/collections", + summary="Update a collection if it exists, otherwise create a new collection", + response_model=schemas.Collection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def update_collection_by_id( + collection: schemas.Collection, + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.update(collection) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.get( + "/collections", + summary="Get all collections", + response_model=List[schemas.CollectionBase], + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def get_all_collections( + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.get_all_collections() + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + response_data = [] + for row in row_data: + row.base_url = base_url + response_data.append(schemas.Collection.from_orm(row)) + return response_data + + +@router.get( + "/collections/{collectionId}", + summary="Get a collection by id", + response_model=schemas.Collection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def get_collection_by_id( + collectionId: str, + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.read(collectionId) + except errors.NotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.delete( + "/collections/{collectionId}", + summary="Delete 
a collection by id", + response_model=schemas.Collection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def delete_collection_by_id( + collectionId: str, + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.delete(collectionId) + except errors.NotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data diff --git a/stac_api/resources/conformance.py b/stac_api/resources/conformance.py new file mode 100644 index 000000000..7889c94b2 --- /dev/null +++ b/stac_api/resources/conformance.py @@ -0,0 +1,63 @@ +from urllib.parse import urljoin + +from fastapi import APIRouter, Depends +from stac_pydantic.api import ConformanceClasses, LandingPage +from stac_pydantic.shared import Link, MimeTypes, Relations + +from ..clients import collection_crud_client_factory +from ..models.links import CollectionLinks +from ..utils.dependencies import discover_base_url +from ..clients.collection_crud import CollectionCrudClient + +router = APIRouter() + + +@router.get("/", response_model=LandingPage, response_model_exclude_unset=True) +def landing_page( + base_url: str = Depends(discover_base_url), + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), +): + resp = LandingPage( + title="Arturo STAC API", + description="Arturo raster datastore", + links=[ + Link(rel=Relations.self, type=MimeTypes.json, href=base_url), + Link( + rel=Relations.docs, + type=MimeTypes.html, + title="OpenAPI docs", + href=urljoin(base_url, "/docs"), + ), + Link( + rel=Relations.conformance, + type=MimeTypes.json, + title="STAC/WFS3 conformance classes implemented by this server", + href=urljoin(base_url, "/conformance"), + ), + Link( + rel=Relations.search, + type=MimeTypes.geojson, + title="STAC search", + href=urljoin(base_url, "/search"), + ), + ], + ) + collections = crud_client.get_all_collections() + for coll in collections: + coll_link = CollectionLinks(collection_id=coll.id, base_url=base_url).self() + coll_link.rel = Relations.child + coll_link.title = coll.title + resp.links.append(coll_link) + return resp + + +@router.get( + "/conformance", response_model=ConformanceClasses, response_model_exclude_unset=True +) +def coformance_classes(): + return ConformanceClasses( + conformsTo=[ + "https://stacspec.org/STAC-api.html", + "http://docs.opengeospatial.org/is/17-069r3/17-069r3.html#ats_geojson", + ] + ) diff --git a/stac_api/resources/item.py b/stac_api/resources/item.py new file mode 100644 index 000000000..46e3c912f --- /dev/null +++ b/stac_api/resources/item.py @@ -0,0 +1,426 @@ +from datetime import datetime +import json +from typing import List, Union, Optional +from urllib.parse import urlencode + +from fastapi import APIRouter, Depends, Query +from starlette import status +from starlette.requests import Request +from starlette.exceptions import HTTPException +from stac_pydantic.item import ItemCollection +from stac_pydantic.shared import Link, Relations +from stac_pydantic.api.extensions.paging import PaginationLink + +from .. 
import errors +from ..clients import collection_crud_client_factory, item_crud_client_factory +from ..clients.collection_crud import CollectionCrudClient +from ..clients.item_crud import ItemCrudClient +from ..utils.dependencies import discover_base_url, parse_list_factory +from ..models import schemas + +router = APIRouter() + +NumType = Union[float, int] + + +@router.post( + "/collections/{collectionId}/items", + response_model=schemas.Item, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def create_item_by_id( + item: schemas.Item, + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.create(item) + except errors.ConflictError as e: + raise HTTPException(status_code=status.HTTP_409_CONFLICT, detail=e.message) + except errors.ForeignKeyError as e: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=e.message + ) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.put( + "/collections/{collectionId}/items", + response_model=schemas.Item, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def update_item_by_id( + item: schemas.Item, + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.update(item) + except errors.ForeignKeyError as e: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=e.message + ) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.delete( + "/collections/{collectionId}/items/{itemId}", + response_model=schemas.Item, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def delete_item_by_id( + itemId: str, + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.delete(itemId) + except errors.NotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.get( + "/collections/{collectionId}/items", + response_model=ItemCollection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def get_item_collection( + collectionId: str, + limit: int = 10, + token: Optional[str] = None, + crud_client: CollectionCrudClient = Depends(collection_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + page, count = crud_client.get_item_collection(collectionId, limit, token=token) + except errors.NotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + 
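+            # Error-to-status convention used across these routes: NotFoundError -> 404,
+            # ConflictError -> 409, ForeignKeyError -> 422, DatabaseError -> 424,
+            # anything unexpected -> 500.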
status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{base_url}/collections/{collectionId}/items?token={page.next}&limit={limit}", + method="GET", + ) + ) + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{base_url}/collections/{collectionId}/items?token={page.previous}&limit={limit}", + method="GET", + ) + ) + + response_features = [] + for item in page: + item.base_url = base_url + response_features.append(schemas.Item.from_orm(item)) + + return ItemCollection( + type="FeatureCollection", + context={"returned": len(page), "limit": limit, "matched": count,}, + features=response_features, + links=links, + ) + + +@router.get( + "/collections/{collectionId}/items/{itemId}", + response_model=schemas.Item, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def get_item_by_id( + itemId: str, + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + row_data = crud_client.read(itemId) + except errors.NotFoundError as e: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=e.message) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + row_data.base_url = base_url + return row_data + + +@router.post( + "/search", + response_model=ItemCollection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def search_items_post( + search_request: schemas.STACSearch, + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + try: + page, count = crud_client.stac_search(search_request) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + + links = [] + if page.next: + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{base_url}/search", + method="POST", + body={"token": page.next}, + merge=True, + ) + ) + if page.previous: + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{base_url}/search", + method="POST", + body={"token": page.previous}, + merge=True, + ) + ) + + response_features = [] + filter_kwargs = search_request.field.filter_fields + for item in page: + item.base_url = base_url + response_features.append(schemas.Item.from_orm(item).to_dict(**filter_kwargs)) + + # Geoalchemy doesn't have a good way of calculating extent of many features, so we'll calculate it outside the db + bbox = None + if count > 0: + xvals = [ + item + for sublist in [ + [float(item["bbox"][0]), float(item["bbox"][2])] + for item in response_features + ] + for item in sublist + ] + yvals = [ + item + for sublist in [ + [float(item["bbox"][1]), float(item["bbox"][3])] + for item in response_features + ] + for item in sublist + ] + bbox = (min(xvals), min(yvals), max(xvals), max(yvals)) + + return ItemCollection( + 
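+        # ``context`` reports the returned/limit/matched counts from the STAC API
+        # context extension; ``links`` carries the pagination links built above.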
type="FeatureCollection", + context={ + "returned": len(page), + "limit": search_request.limit, + "matched": count, + }, + features=response_features, + links=links, + bbox=bbox, + ) + + +@router.get( + "/search", + response_model=ItemCollection, + response_model_exclude_unset=True, + response_model_exclude_none=True, +) +def search_items_get( + request: Request, + collections: Optional[List[str]] = Depends(parse_list_factory("collections")), + ids: Optional[List[str]] = Depends(parse_list_factory("ids")), + bbox: Optional[List[NumType]] = Depends(parse_list_factory("bbox")), + datetime: Optional[Union[str, datetime]] = Query(None), + limit: Optional[int] = Query(10), + query: Optional[str] = Query(None), + token: Optional[str] = None, + fields: Optional[List[str]] = Depends(parse_list_factory("fields")), + cloudfront_ttl: Optional[int] = 2628000, + sortby: Optional[str] = Depends(parse_list_factory("sortby")), + crud_client: ItemCrudClient = Depends(item_crud_client_factory), + base_url: str = Depends(discover_base_url), +): + # Parse request parameters + base_args = { + "collections": collections, + "ids": ids, + "bbox": bbox, + "limit": limit, + "token": token, + "cloudfront_ttl": cloudfront_ttl, + "query": json.loads(query) if query else query, + } + if datetime: + base_args["datetime"] = datetime + if sortby: + # https://github.com/radiantearth/stac-spec/tree/master/api-spec/extensions/sort#http-get-or-post-form + sort_param = [] + for sort in sortby: + sort_param.append( + {"field": sort[1:], "direction": "asc" if sort[0] == "+" else "desc"} + ) + base_args["sortby"] = sort_param + + if fields: + includes = set() + excludes = set() + for field in fields: + if field[0] == "-": + excludes.add(field[1:]) + elif field[0] == "+": + includes.add(field[1:]) + else: + includes.add(field) + base_args["fields"] = {"include": includes, "exclude": excludes} + + # Do the request + search_request = schemas.STACSearch(**base_args) + filter_kwargs = search_request.field.filter_fields + try: + page, count = crud_client.stac_search(search_request) + except errors.DatabaseError as e: + raise HTTPException( + status_code=status.HTTP_424_FAILED_DEPENDENCY, detail=e.message + ) + except Exception as e: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(e) + ) + + # Pagination + links = [] + query_params = dict(request.query_params) + if page.next: + query_params["token"] = page.next + links.append( + PaginationLink( + rel=Relations.next, + type="application/geo+json", + href=f"{base_url}/search?{urlencode(query_params)}", + method="GET", + ) + ) + if page.previous: + query_params["token"] = page.previous + links.append( + PaginationLink( + rel=Relations.previous, + type="application/geo+json", + href=f"{base_url}/search?{urlencode(query_params)}", + method="GET", + ) + ) + + # Add OGC Tile links + if not collections: + collections = {item.collection_id for item in page} + + for coll in collections: + links.append( + Link( + rel=Relations.tiles, + type="application/json", + href=f"{base_url}/collections/{coll}/tiles?{urlencode(query_params)}", + ) + ) + + # Decompose to pydantic models and cloudfront signing + response_features = [] + for item in page: + item.base_url = base_url + response_features.append(schemas.Item.from_orm(item).to_dict(**filter_kwargs)) + + # Add bbox + if count > 0: + xvals = [ + item + for sublist in [ + [float(item["bbox"][0]), float(item["bbox"][2])] + for item in response_features + ] + for item in sublist + ] + yvals = [ + item + for sublist 
in [ + [float(item["bbox"][1]), float(item["bbox"][3])] + for item in response_features + ] + for item in sublist + ] + bbox = (min(xvals), min(yvals), max(xvals), max(yvals)) + + return ItemCollection( + type="FeatureCollection", + context={ + "returned": len(page), + "limit": search_request.limit, + "matched": count, + }, + features=response_features, + links=links, + bbox=bbox, + ) diff --git a/stac_api/resources/mgmt.py b/stac_api/resources/mgmt.py new file mode 100644 index 000000000..6a3891c9f --- /dev/null +++ b/stac_api/resources/mgmt.py @@ -0,0 +1,14 @@ +from fastapi import APIRouter +from pydantic import BaseModel + + +router = APIRouter() + + +class Message(BaseModel): + message: str + + +@router.get("/_mgmt/ping", response_model=Message) +async def ping(): + return Message(message="PONG") diff --git a/stac_api/settings.py b/stac_api/settings.py new file mode 100644 index 000000000..da0a5c766 --- /dev/null +++ b/stac_api/settings.py @@ -0,0 +1,43 @@ +from starlette.config import Config + + +config = Config(".env") + + +ENVIRONMENT = config("ENVIRONMENT", cast=str) +DEBUG = config("DEBUG", cast=bool, default=False) +TESTING = config("TESTING", cast=bool, default=False) + + +# Database config +POSTGRES_USER = config("POSTGRES_USER", cast=str) +POSTGRES_PASS = config("POSTGRES_PASS", cast=str) +POSTGRES_DBNAME = config("POSTGRES_DBNAME", cast=str) +POSTGRES_PORT = config("POSTGRES_PORT", cast=str) +POSTGRES_HOST_READER = config("POSTGRES_HOST_READER", cast=str) +POSTGRES_HOST_WRITER = config("POSTGRES_HOST_WRITER", cast=str) + + +# Database connection strings +SQLALCHEMY_DATABASE_READER = f"postgresql://{POSTGRES_USER}:{POSTGRES_PASS}@{POSTGRES_HOST_READER}:{POSTGRES_PORT}/{POSTGRES_DBNAME}" +SQLALCHEMY_DATABASE_WRITER = f"postgresql://{POSTGRES_USER}:{POSTGRES_PASS}@{POSTGRES_HOST_WRITER}:{POSTGRES_PORT}/{POSTGRES_DBNAME}" + + +# Fields which are defined by STAC but not included in the database model +FORBIDDEN_FIELDS = {"type", "stac_version", "stac_extensions"} + + +# Fields which are item properties but indexed as distinct fields in the database model +INDEXED_FIELDS = {"datetime"} + + +# Fields which are always included in the response (fields extension) +DEFAULT_INCLUDES = { + "id", + "type", + "geometry", + "bbox", + "links", + "assets", + "properties.datetime", +} diff --git a/stac_api/utils/__init__.py b/stac_api/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/stac_api/utils/dependencies.py b/stac_api/utils/dependencies.py new file mode 100644 index 000000000..892bff856 --- /dev/null +++ b/stac_api/utils/dependencies.py @@ -0,0 +1,58 @@ +from dataclasses import dataclass +from typing import Callable, List, Optional + +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session +from starlette.requests import Request + + +ENGINE_READER: Optional[Engine] = None +ENGINE_WRITER: Optional[Engine] = None +DB_READER: Optional[Session] = None +DB_WRITER: Optional[Session] = None + + +@dataclass +class DatabaseConnectionError(Exception): + message: str + + +def discover_base_url(request: Request): + """Discover base url of a request""" + return f"{request.url.scheme}://{request.url.netloc}" + + +def parse_list_factory(varname) -> Callable[[Request], List[str]]: + """Parse the value of a specific parameter from comma-delimited string to list of strings""" + + def _parse(request: Request): + param = request.query_params.get(varname) + return param.split(",") if param else param + + return _parse + + +def database_reader_factory() -> 
Session: + """Instantiate the database reader session""" + try: + if not DB_READER: + raise DatabaseConnectionError( + message="Database engine has not been created" + ) + db = DB_READER() + yield db + finally: + db.close() + + +def database_writer_factory() -> Session: + """Instantiate the database writer session""" + try: + if not DB_WRITER: + raise DatabaseConnectionError( + message="Database engine has not been created" + ) + db = DB_WRITER() + yield db + finally: + db.close() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/clients/__init__.py b/tests/clients/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/clients/test_crud.py b/tests/clients/test_crud.py new file mode 100644 index 000000000..9f461107e --- /dev/null +++ b/tests/clients/test_crud.py @@ -0,0 +1,196 @@ +from random import randint + +import pytest +from psycopg2._psycopg import sqlstate_errors +from sqlalchemy.orm import Session + +from stac_api.errors import ( + ConflictError, + DatabaseError, + NotFoundError, + ForeignKeyError, +) +from stac_api.models import database +from stac_api.models.schemas import Collection, Item +from stac_api.clients.base_crud import BaseCrudClient +from stac_api.clients.collection_crud import CollectionCrudClient + +from ..conftest import create_mock + + +def random_pg_exception(): + """Generate a random psycopg2 exception""" + pg_errors = list(sqlstate_errors) + return sqlstate_errors[pg_errors[randint(0, len(pg_errors) - 1)]] + + +def test_create_and_delete_item(item_crud_client, load_test_data): + """Test creation and deletion of a single item""" + test_item = Item(**load_test_data("test_item.json")) + row_data = item_crud_client.create(test_item) + assert test_item.id == row_data.id + + deleted_item = item_crud_client.delete(test_item.id) + assert deleted_item.id == test_item.id + + +def test_create_item_already_exists(item_crud_client, load_test_data): + """Test creation of an item which already exists""" + test_item = Item(**load_test_data("test_item.json")) + row_data = item_crud_client.create(test_item) + assert test_item.id == row_data.id + + with pytest.raises(ConflictError): + test_item_duplicate = Item(**load_test_data("test_item.json")) + item_crud_client.create(test_item_duplicate) + + +def test_delete_missing_item(item_crud_client): + """Test deletion of an item which does not exist""" + with pytest.raises(NotFoundError): + item_crud_client.delete("this id doesn't exist") + + +def test_create_item_missing_collection(item_crud_client, load_test_data): + """Test creation of an item without a parent collection""" + test_item = Item(**load_test_data("test_item.json")) + test_item.collection = "this collection doesn't exist" + with pytest.raises(ForeignKeyError): + item_crud_client.create(test_item) + + +def test_update_item_already_exists(item_crud_client, load_test_data): + """Test updating an item""" + test_item = Item(**load_test_data("test_item.json")) + item_crud_client.create(test_item) + + test_item = Item(**load_test_data("test_item.json")) + test_item.properties.new_prop = "test" + updated_row = item_crud_client.update(test_item) + + assert updated_row.properties["new_prop"] == "test" + + +def test_update_new_item(item_crud_client, load_test_data): + """Test updating an item which doesn't exist (same as creation)""" + test_item = Item(**load_test_data("test_item.json")) + row_data = item_crud_client.update(test_item) + assert test_item.id == row_data.id + + +def 
test_create_and_delete_collection(collection_crud_client, load_test_data): + """Test creation and deletion of a collection""" + test_collection = Collection(**load_test_data("test_collection.json")) + row_data = collection_crud_client.create(test_collection) + assert test_collection.id == row_data.id + + deleted_collection = collection_crud_client.delete(test_collection.id) + assert deleted_collection.id == test_collection.id + + +def test_create_collection_conflict(collection_crud_client, load_test_data): + """Test creation of a collection which already exists""" + test_collection = Collection(**load_test_data("test_collection.json")) + row_data = collection_crud_client.create(test_collection) + assert test_collection.id == row_data.id + + with pytest.raises(ConflictError): + test_collection = Collection(**load_test_data("test_collection.json")) + collection_crud_client.create(test_collection) + + +def test_delete_missing_collection(collection_crud_client): + """Test deletion of a collection which does not exist""" + with pytest.raises(NotFoundError): + collection_crud_client.delete("this id also doesn't exist") + + +def test_update_collection_already_exists(collection_crud_client, load_test_data): + """Test updating a collection which already exists""" + test_collection = Collection(**load_test_data("test_collection.json")) + row_data = collection_crud_client.create(test_collection) + + test_collection = Collection(**load_test_data("test_collection.json")) + test_collection.keywords.append("new keyword") + updated_row = collection_crud_client.update(test_collection) + + assert "new keyword" in updated_row.keywords + + +def test_update_new_collection(collection_crud_client, load_test_data): + """Test update a collection which does not exist (same as creation)""" + test_collection = Collection(**load_test_data("test_collection.json")) + row_data = collection_crud_client.update(test_collection) + assert test_collection.id == row_data.id + + +def test_read_database_error(): + """Test custom exception is raised on psycopg2 errors""" + mock_session = create_mock( + client=Session, mocked_method="query", error=random_pg_exception() + ) + crud_client = BaseCrudClient( + reader_session=mock_session, writer_session=mock_session, table=database.Item + ) + + with pytest.raises(DatabaseError): + crud_client.read("test-item") + + +def test_update_database_error(load_test_data): + """Test custom exception is raised on psycopg2 errors""" + test_item = Item(**load_test_data("test_item.json")) + mock_session = create_mock( + client=Session, mocked_method="commit", error=random_pg_exception() + ) + crud_client = BaseCrudClient( + reader_session=mock_session, writer_session=mock_session, table=database.Item + ) + + with pytest.raises(DatabaseError): + crud_client.update(test_item) + + +def test_create_database_error(load_test_data): + """Test custom exception is raised on psycopg2 errors""" + test_item = Item(**load_test_data("test_item.json")) + mock_session = create_mock( + client=Session, mocked_method="query", error=random_pg_exception() + ) + crud_client = BaseCrudClient( + reader_session=mock_session, writer_session=mock_session, table=database.Item + ) + + with pytest.raises(DatabaseError): + crud_client.create(test_item) + + +def test_get_all_collections_database_error(pagination_client): + """Test custom exception is raised on psycopg2 errors""" + mock_session = create_mock( + client=Session, mocked_method="query", error=random_pg_exception() + ) + crud_client = CollectionCrudClient( + 
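+        # Both sessions are the same mock whose ``query`` method raises a randomly
+        # chosen psycopg2 error, exercising the generic DatabaseError path.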
reader_session=mock_session, + writer_session=mock_session, + table=database.Collection, + pagination_client=pagination_client, + ) + with pytest.raises(DatabaseError): + crud_client.get_all_collections() + + +def test_get_item_collection_database_error(load_test_data, pagination_client): + """Test custom exception is raised on psycopg2 errors""" + test_coll = Collection(**load_test_data("test_collection.json")) + mock_session = create_mock( + client=Session, mocked_method="query", error=random_pg_exception() + ) + crud_client = CollectionCrudClient( + reader_session=mock_session, + writer_session=mock_session, + table=database.Collection, + pagination_client=pagination_client, + ) + with pytest.raises(DatabaseError): + crud_client.get_item_collection(test_coll.id, limit=10) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..9df5a5092 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,182 @@ +from contextlib import contextmanager +import json +import os +from starlette.config import environ +from typing import Callable, ContextManager, Dict, List, Union +from unittest.mock import Mock, MagicMock + +# This line would raise an error if we use it after 'settings' has been imported. +environ["TESTING"] = "true" +environ["DEBUG"] = "true" + +import pytest +from sqlalchemy import create_engine +from sqlalchemy.engine import Engine +from sqlalchemy.orm import sessionmaker, Session +from starlette.testclient import TestClient + +from stac_api import settings +from stac_api.app import app +from stac_api.models import database, schemas +from stac_api.clients.collection_crud import CollectionCrudClient +from stac_api.clients.item_crud import ItemCrudClient +from stac_api.clients.tokens import PaginationTokenClient +from stac_api.errors import NotFoundError + +DATA_DIR = os.path.join(os.path.dirname(__file__), "data") + + +def create_mock(client: Callable, mocked_method: str, error: Exception) -> MagicMock: + """Create a mock client which raises an exception""" + mock_client = MagicMock(client) + setattr(mock_client, mocked_method, Mock(side_effect=error)) + return mock_client + + +@contextmanager +def create_test_client_with_error( + client: Union[ItemCrudClient, CollectionCrudClient], + mocked_method: str, + dependency: Callable, + error: Exception, +) -> ContextManager[TestClient]: + """Inject a mock client into the test app""" + app.dependency_overrides[dependency] = lambda: create_mock( + client, mocked_method, error + ) + with TestClient(app) as test_client: + yield test_client + + app.dependency_overrides = {} + + +@pytest.fixture +def app_client(load_test_data): + """ + Make a client fixture available to test cases. 
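+    The test collection is created before the client is yielded; afterwards any
+    collection whose id contains "test" is deleted along with its items.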
+ """ + test_collection = load_test_data("test_collection.json") + with TestClient(app) as test_client: + # Create collection + test_client.post("/collections", json=test_collection) + yield test_client + + # Cleanup test data + collections = test_client.get("/collections").json() + for coll in collections: + collection_id = coll["id"] + if "test" in collection_id: + # Get collection items + item_collection = test_client.get( + f"/collections/{collection_id}/items", params={"limit": 500} + ).json() + for item in item_collection["features"]: + test_client.delete(f"/collections/{collection_id}/items/{item['id']}") + test_client.delete(f"/collections/{collection_id}") + + +@pytest.fixture +def load_test_data() -> Callable[[str], Dict]: + def load_file(filename: str) -> Dict: + with open(os.path.join(DATA_DIR, filename)) as file: + return json.load(file) + + return load_file + + +def load_all_test_data(filter: str) -> List[Dict]: + return [ + json.load(open(os.path.join(DATA_DIR, f))) + for f in os.listdir(DATA_DIR) + if filter in f + ] + + +@pytest.fixture +def reader_connection() -> Session: + """Create a reader connection""" + engine = create_engine(settings.SQLALCHEMY_DATABASE_READER) + db_session = sessionmaker(autocommit=False, autoflush=False, bind=engine)() + yield db_session + db_session.close() + engine.dispose() + + +@pytest.fixture +def writer_connection() -> Session: + """Create a writer connection""" + engine = create_engine(settings.SQLALCHEMY_DATABASE_WRITER) + db_session = sessionmaker(autocommit=False, autoflush=False, bind=engine)() + yield db_session + db_session.close() + engine.dispose() + + +@pytest.fixture +def pagination_client( + reader_connection: Session, writer_connection: Session +) -> PaginationTokenClient: + """Create a pagination client""" + return PaginationTokenClient( + reader_session=reader_connection, + writer_session=writer_connection, + table=database.PaginationToken, + ) + + +@pytest.fixture +def collection_crud_client( + reader_connection: Session, + writer_connection: Session, + pagination_client: PaginationTokenClient, +) -> CollectionCrudClient: + """Create a collection client. Clean up data after each test. """ + client = CollectionCrudClient( + reader_session=reader_connection, + writer_session=writer_connection, + table=database.Collection, + pagination_client=pagination_client, + ) + yield client + + # Cleanup collections + for test_data in load_all_test_data("collection"): + try: + client.delete(test_data["id"]) + except NotFoundError: + pass + + +@pytest.fixture +def item_crud_client( + reader_connection: Session, + writer_connection: Session, + collection_crud_client: CollectionCrudClient, + load_test_data, +) -> ItemCrudClient: + """Create an item client. 
Create a collection used for testing and clean up data after each test.""" + # Create a test collection (foreignkey) + test_collection = schemas.Collection(**load_test_data("test_collection.json")) + collection_crud_client.create(test_collection) + + client = ItemCrudClient( + reader_session=reader_connection, + writer_session=writer_connection, + table=database.Item, + collection_crud=collection_crud_client, + pagination_client=pagination_client, + ) + yield client + + # Cleanup test items + for test_data in load_all_test_data("item"): + try: + client.delete(test_data["id"]) + except NotFoundError: + pass + + # Cleanup collection + try: + collection_crud_client.delete(test_collection.id) + except NotFoundError: + pass diff --git a/tests/data/test_collection.json b/tests/data/test_collection.json new file mode 100644 index 000000000..be7741713 --- /dev/null +++ b/tests/data/test_collection.json @@ -0,0 +1 @@ +{"id":"test-collection","description":"Landat 8 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.","stac_version":"0.9.0","license":"PDDL-1.0","extent":{"spatial":{"bbox":[[-180.0,-90.0,180.0,90.0]]},"temporal":{"interval":[["2013-06-01",null]]}},"links":[{"href":"http://localhost:8081/collections/landsat-8-l1","rel":"self","type":"application/json"},{"href":"http://localhost:8081/","rel":"parent","type":"application/json"},{"href":"http://localhost:8081/collections/landsat-8-l1/items","rel":"item","type":"application/geo+json"},{"href":"http://localhost:8081/","rel":"root","type":"application/json"}],"title":"Landsat 8 L1","keywords":["landsat","earth observation","usgs"],"providers":[{"name":"USGS","roles":["producer"],"url":"https://landsat.usgs.gov/"},{"name":"Planet Labs","roles":["processor"],"url":"https://github.com/landsat-pds/landsat_ingestor"},{"name":"AWS","roles":["host"],"url":"https://landsatonaws.com/"},{"name":"Development Seed","roles":["processor"],"url":"https://github.com/sat-utils/sat-api"},{"name":"Earth Search by Element84","description":"API of Earth on AWS datasets","roles":["host"],"url":"https://element84.com"}]} \ No newline at end of file diff --git a/tests/data/test_item.json b/tests/data/test_item.json new file mode 100644 index 000000000..c1cbb4fbf --- /dev/null +++ b/tests/data/test_item.json @@ -0,0 +1,257 @@ +{ +"type": "Feature", +"id": "test-item", +"geometry": { +"coordinates": [ +[ +[ +152.15052873427666, +-33.82243006904891 +], +[ +150.1000346138806, +-34.257132625788756 +], +[ +149.5776607193635, +-32.514709769700254 +], +[ +151.6262528041627, +-32.08081674221862 +], +[ +152.15052873427666, +-33.82243006904891 +] +] +], +"type": "Polygon" +}, +"properties": { +"datetime": "2020-02-12T12:30:22Z", +"landsat:scene_id": "LC82081612020043LGN00", +"eo:row": "161", +"eo:gsd": 15, +"eo:bands": [ +{ +"gsd": 30, +"name": "B1", +"common_name": "coastal", +"center_wavelength": 0.44, +"full_width_half_max": 0.02 +}, +{ +"gsd": 30, +"name": "B2", +"common_name": "blue", +"center_wavelength": 0.48, +"full_width_half_max": 0.06 +}, +{ +"gsd": 30, +"name": "B3", +"common_name": "green", +"center_wavelength": 0.56, +"full_width_half_max": 0.06 +}, +{ +"gsd": 30, +"name": "B4", +"common_name": "red", +"center_wavelength": 0.65, +"full_width_half_max": 0.04 +}, +{ +"gsd": 30, +"name": "B5", +"common_name": "nir", +"center_wavelength": 0.86, +"full_width_half_max": 0.03 +}, +{ +"gsd": 30, +"name": "B6", +"common_name": "swir16", +"center_wavelength": 1.6, 
+"full_width_half_max": 0.08 +}, +{ +"gsd": 30, +"name": "B7", +"common_name": "swir22", +"center_wavelength": 2.2, +"full_width_half_max": 0.2 +}, +{ +"gsd": 15, +"name": "B8", +"common_name": "pan", +"center_wavelength": 0.59, +"full_width_half_max": 0.18 +}, +{ +"gsd": 30, +"name": "B9", +"common_name": "cirrus", +"center_wavelength": 1.37, +"full_width_half_max": 0.02 +}, +{ +"gsd": 100, +"name": "B10", +"common_name": "lwir11", +"center_wavelength": 10.9, +"full_width_half_max": 0.8 +}, +{ +"gsd": 100, +"name": "B11", +"common_name": "lwir12", +"center_wavelength": 12, +"full_width_half_max": 1 +} +], +"landsat:revision": "00", +"eo:sun_azimuth": -148.83296771, +"eo:instrument": "OLI_TIRS", +"landsat:product_id": "LC08_L1GT_208161_20200212_20200212_01_RT", +"eo:cloud_cover": -1, +"landsat:tier": "RT", +"landsat:processing_level": "L1GT", +"eo:column": "208", +"eo:platform": "landsat-8", +"proj:epsg": 32756, +"eo:sun_elevation": -37.30791534, +"eo:off_nadir": 0, +"height": 2500, +"width": 2500 +}, +"bbox": [ +149.57574, +-34.25796, +152.15194, +-32.07915 +], +"collection": "test-collection", +"assets": { +"B1": { +"type": "image/tiff; application=geotiff", +"title": "Band 1 (coastal)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B1.TIF", +"description": "this is a description" +}, +"B2": { +"type": "image/tiff; application=geotiff", +"title": "Band 2 (blue)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B2.TIF", +"description": "this is a description" +}, +"B3": { +"type": "image/tiff; application=geotiff", +"title": "Band 3 (green)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B3.TIF", +"description": "this is a description" +}, +"B4": { +"type": "image/tiff; application=geotiff", +"title": "Band 4 (red)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B4.TIF", +"description": "this is a description" +}, +"B5": { +"type": "image/tiff; application=geotiff", +"title": "Band 5 (nir)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B5.TIF", +"description": "this is a description" +}, +"B6": { +"type": "image/tiff; application=geotiff", +"title": "Band 6 (swir16)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B6.TIF", +"description": "this is a description" +}, +"B7": { +"type": "image/tiff; application=geotiff", +"title": "Band 7 (swir22)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B7.TIF", +"description": "this is a description" +}, +"B8": { +"type": "image/tiff; application=geotiff", +"title": "Band 8 (pan)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B8.TIF", +"description": "this is a description" +}, +"B9": { +"type": "image/tiff; application=geotiff", +"title": "Band 9 (cirrus)", +"href": 
"https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B9.TIF", +"description": "this is a description" +}, +"ANG": { +"type": "text/plain", +"title": "Angle coefficients file", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_ANG.txt", +"description": "this is a description" +}, +"B10": { +"type": "image/tiff; application=geotiff", +"title": "Band 10 (lwir)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B10.TIF", +"description": "this is a description" +}, +"B11": { +"type": "image/tiff; application=geotiff", +"title": "Band 11 (lwir)", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_B11.TIF", +"description": "this is a description" +}, +"BQA": { +"type": "image/tiff; application=geotiff", +"title": "Band quality data", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_BQA.TIF", +"description": "this is a description" +}, +"MTL": { +"type": "text/plain", +"title": "original metadata file", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_MTL.txt", +"description": "this is a description" +}, +"index": { +"type": "text/html", +"title": "HTML index page", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_MTL.txt", +"description": "this is a description" +}, +"thumbnail": { +"type": "image/jpeg", +"title": "Thumbnail image", +"href": "https://landsat-pds.s3.amazonaws.com/c1/L8/208/161/LC08_L1GT_208161_20200212_20200212_01_RT/LC08_L1GT_208161_20200212_20200212_01_RT_thumb_large.jpg", +"description": "this is a description" +} +}, +"links": [ +{ +"href": "http://localhost:8081/collections/landsat-8-l1/items/LC82081612020043", +"rel": "self", +"type": "application/geo+json" +}, +{ +"href": "http://localhost:8081/collections/landsat-8-l1", +"rel": "parent", +"type": "application/json" +}, +{ +"href": "http://localhost:8081/collections/landsat-8-l1", +"rel": "collection", +"type": "application/json" +}, +{ +"href": "http://localhost:8081/", +"rel": "root", +"type": "application/json" +} +] +} \ No newline at end of file diff --git a/tests/resources/__init__.py b/tests/resources/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/resources/test_collection.py b/tests/resources/test_collection.py new file mode 100644 index 000000000..174c9fe0a --- /dev/null +++ b/tests/resources/test_collection.py @@ -0,0 +1,143 @@ +from stac_api.errors import DatabaseError +from stac_api.clients import collection_crud_client_factory +from stac_api.clients.collection_crud import CollectionCrudClient + +from ..conftest import create_test_client_with_error + + +def test_create_and_delete_collection(app_client, load_test_data): + """Test creation and deletion of a collection""" + test_collection = load_test_data("test_collection.json") + test_collection["id"] = "test" + + resp = app_client.post(f"/collections", json=test_collection) + assert resp.status_code == 200 + + resp = app_client.delete(f"/collections/{test_collection['id']}") + assert resp.status_code 
== 200
+
+
+def test_create_collection_conflict(app_client, load_test_data):
+    """Test creation of a collection which already exists"""
+    # This collection ID is created in the fixture, so this should be a conflict
+    test_collection = load_test_data("test_collection.json")
+    resp = app_client.post(f"/collections", json=test_collection)
+    assert resp.status_code == 409
+
+
+def test_delete_missing_collection(app_client):
+    """Test deletion of a collection which does not exist"""
+    resp = app_client.delete(f"/collections/missing-collection")
+    assert resp.status_code == 404
+
+
+def test_update_collection_already_exists(app_client, load_test_data):
+    """Test updating a collection which already exists"""
+    test_collection = load_test_data("test_collection.json")
+    test_collection["keywords"].append("test")
+    resp = app_client.put("/collections", json=test_collection)
+    assert resp.status_code == 200
+
+    resp = app_client.get(f"/collections/{test_collection['id']}")
+    assert resp.status_code == 200
+    resp_json = resp.json()
+    assert "test" in resp_json["keywords"]
+
+
+def test_update_new_collection(app_client, load_test_data):
+    """Test updating a collection which does not exist (same as creation)"""
+    test_collection = load_test_data("test_collection.json")
+    test_collection["id"] = "new-test-collection"
+
+    resp = app_client.put(f"/collections", json=test_collection)
+    assert resp.status_code == 200
+
+    resp = app_client.get(f"/collections/{test_collection['id']}")
+    assert resp.status_code == 200
+    resp_json = resp.json()
+    assert resp_json["id"] == test_collection["id"]
+
+
+def test_get_all_collections(app_client, load_test_data):
+    """Test reading all collections"""
+    test_collection = load_test_data("test_collection.json")
+    test_collection["id"] = "new-test-collection"
+
+    resp = app_client.post(f"/collections", json=test_collection)
+    assert resp.status_code == 200
+
+    resp = app_client.get("/collections")
+    assert resp.status_code == 200
+    resp_json = resp.json()
+
+    assert test_collection["id"] in [coll["id"] for coll in resp_json]
+
+
+def test_collection_not_found(app_client):
+    """Test read a collection which does not exist"""
+    resp = app_client.get(f"/collections/does-not-exist")
+    assert resp.status_code == 404
+
+
+def test_create_collection_database_error(load_test_data):
+    """Test 424 is raised on database error"""
+    test_collection = load_test_data("test_collection.json")
+    with create_test_client_with_error(
+        client=CollectionCrudClient,
+        mocked_method="create",
+        dependency=collection_crud_client_factory,
+        error=DatabaseError(message="error"),
+    ) as test_client:
+        resp = test_client.post("/collections", json=test_collection)
+        assert resp.status_code == 424
+
+
+def test_update_collection_database_error(load_test_data):
+    """Test 424 is raised on database error"""
+    test_collection = load_test_data("test_collection.json")
+    with create_test_client_with_error(
+        client=CollectionCrudClient,
+        mocked_method="update",
+        dependency=collection_crud_client_factory,
+        error=DatabaseError(message="error"),
+    ) as test_client:
+        resp = test_client.put("/collections", json=test_collection)
+        assert resp.status_code == 424
+
+
+def test_get_all_collections_database_error():
+    """Test 424 is raised on database error"""
+    with create_test_client_with_error(
+        client=CollectionCrudClient,
+        mocked_method="get_all_collections",
+        dependency=collection_crud_client_factory,
+        error=DatabaseError(message="error"),
+    ) as test_client:
+        resp = test_client.get("/collections")
+        assert 
resp.status_code == 424 + + +def test_get_collection_database_error(load_test_data): + """Test 424 is raised on database error""" + test_collection = load_test_data("test_collection.json") + with create_test_client_with_error( + client=CollectionCrudClient, + mocked_method="read", + dependency=collection_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.get(f"/collections/{test_collection['id']}") + assert resp.status_code == 424 + + +def test_delete_collection_database_error(load_test_data): + """Test 424 is raised on database error""" + test_collection = load_test_data("test_collection.json") + with create_test_client_with_error( + client=CollectionCrudClient, + mocked_method="delete", + dependency=collection_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.delete(f"/collections/{test_collection['id']}") + assert resp.status_code == 424 diff --git a/tests/resources/test_conformance.py b/tests/resources/test_conformance.py new file mode 100644 index 000000000..0a231e150 --- /dev/null +++ b/tests/resources/test_conformance.py @@ -0,0 +1,20 @@ +import pytest + + +def test_landing_page(app_client): + """Test landing page""" + resp = app_client.get("/") + assert resp.status_code == 200 + resp_json = resp.json() + + # Make sure OpenAPI docs are linked + docs = next(filter(lambda link: link["rel"] == "docs", resp_json["links"]))["href"] + resp = app_client.get(docs) + assert resp.status_code == 200 + + # Make sure conformance classes are linked + conf = next(filter(lambda link: link["rel"] == "conformance", resp_json["links"]))[ + "href" + ] + resp = app_client.get(conf) + assert resp.status_code == 200 diff --git a/tests/resources/test_item.py b/tests/resources/test_item.py new file mode 100644 index 000000000..4fe1bcce8 --- /dev/null +++ b/tests/resources/test_item.py @@ -0,0 +1,796 @@ +from copy import deepcopy +from datetime import datetime, timedelta +import json +from random import randint +from shapely.geometry import Polygon +from stac_pydantic.shared import Asset, MimeTypes +from stac_pydantic.api.search import DATETIME_RFC339 +import time +from urllib.parse import urlsplit, urlparse, parse_qs +import uuid + + +from stac_api.errors import DatabaseError +from stac_api.clients import collection_crud_client_factory, item_crud_client_factory +from stac_api.clients.collection_crud import CollectionCrudClient +from stac_api.clients.item_crud import ItemCrudClient + +from ..conftest import create_test_client_with_error + + +def test_create_and_delete_item(app_client, load_test_data): + """Test creation and deletion of a single item (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + resp = app_client.delete( + f"/collections/{test_item['collection']}/items/{resp.json()['id']}" + ) + assert resp.status_code == 200 + + +def test_create_item_conflict(app_client, load_test_data): + """Test creation of an item which already exists (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 409 + + +def test_delete_missing_item(app_client, load_test_data): + """Test deletion 
of an item which does not exist (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.delete(f"/collections/{test_item['collection']}/items/hijosh") + assert resp.status_code == 404 + + +def test_create_item_missing_collection(app_client, load_test_data): + """Test creation of an item without a parent collection (transactions extension)""" + test_item = load_test_data("test_item.json") + test_item["collection"] = "stac is cool" + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 422 + + +def test_update_item_already_exists(app_client, load_test_data): + """Test updating an item which already exists (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + test_item["properties"]["eo:gsd"] = 16 + resp = app_client.put( + f"/collections/{test_item['collection']}/items", json=test_item + ) + updated_item = resp.json() + assert updated_item["properties"]["eo:gsd"] == 16 + + +def test_update_new_item(app_client, load_test_data): + """Test updating an item which does not exist (transactions extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.put( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + +def test_update_item_missing_collection(app_client, load_test_data): + """Test updating an item without a parent collection (transactions extension)""" + test_item = load_test_data("test_item.json") + test_item["collection"] = "stac is cool" + resp = app_client.put( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 422 + + +def test_get_item(app_client, load_test_data): + """Test read an item by id (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + get_item = app_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert get_item.status_code == 200 + + +def test_get_item_collection(app_client, load_test_data): + """Test read an item collection (core)""" + item_count = randint(1, 4) + test_item = load_test_data("test_item.json") + + for idx in range(item_count): + _test_item = deepcopy(test_item) + _test_item["id"] = test_item["id"] + str(idx) + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=_test_item + ) + assert resp.status_code == 200 + + resp = app_client.get(f"/collections/{test_item['collection']}/items") + assert resp.status_code == 200 + + item_collection = resp.json() + assert item_collection["context"]["matched"] == len(range(item_count)) + + +def test_pagination(app_client, load_test_data): + """Test item collection pagination (paging extension)""" + item_count = 10 + test_item = load_test_data("test_item.json") + + for idx in range(item_count): + _test_item = deepcopy(test_item) + _test_item["id"] = test_item["id"] + str(idx) + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=_test_item + ) + assert resp.status_code == 200 + + resp = app_client.get( + f"/collections/{test_item['collection']}/items", params={"limit": 3} + ) + assert resp.status_code == 200 + first_page = resp.json() + assert first_page["context"]["returned"] == 3 + + url_components = 
urlsplit(first_page["links"][0]["href"]) + resp = app_client.get(f"{url_components.path}?{url_components.query}") + assert resp.status_code == 200 + second_page = resp.json() + assert second_page["context"]["returned"] == 3 + + +def test_item_timestamps(app_client, load_test_data): + """Test created and updated timestamps (common metadata)""" + test_item = load_test_data("test_item.json") + start_time = datetime.utcnow() + time.sleep(2) + # Confirm `created` timestamp + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + item = resp.json() + created_dt = datetime.strptime(item["properties"]["created"], DATETIME_RFC339) + assert resp.status_code == 200 + assert start_time < created_dt < datetime.utcnow() + + time.sleep(2) + # Confirm `updated` timestamp + item["properties"]["proj:epsg"] = 4326 + resp = app_client.put(f"/collections/{test_item['collection']}/items", json=item) + assert resp.status_code == 200 + updated_item = resp.json() + + # Created shouldn't change on update + assert item["properties"]["created"] == updated_item["properties"]["created"] + assert ( + datetime.strptime(updated_item["properties"]["updated"], DATETIME_RFC339) + > created_dt + ) + + +def test_item_search_by_id_post(app_client, load_test_data): + """Test POST search by item id (core)""" + ids = ["test1", "test2", "test3"] + for id in ids: + test_item = load_test_data("test_item.json") + test_item["id"] = id + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = {"collections": [test_item["collection"]], "ids": ids} + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +def test_item_search_spatial_query_post(app_client, load_test_data): + """Test POST search with spatial query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + } + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_temporal_query_post(app_client, load_test_data): + """Test POST search with single-tailed spatio-temporal query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + item_date = datetime.strptime(test_item["properties"]["datetime"], DATETIME_RFC339) + item_date = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": item_date.strftime(DATETIME_RFC339), + } + resp = app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_temporal_window_post(app_client, load_test_data): + """Test POST search with two-tailed spatio-temporal query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + item_date = 
datetime.strptime(test_item["properties"]["datetime"], DATETIME_RFC339) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": f"{item_date_before.strftime(DATETIME_RFC339)}/{item_date_after.strftime(DATETIME_RFC339)}", + } + resp = app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_temporal_open_window(app_client, load_test_data): + """Test POST search with open spatio-temporal query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = { + "collections": [test_item["collection"]], + "intersects": test_item["geometry"], + "datetime": "../..", + } + resp = app_client.post("/search", json=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_sort_post(app_client, load_test_data): + """Test POST search with sorting (sort extension)""" + first_item = load_test_data("test_item.json") + item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + resp = app_client.post( + f"/collections/{first_item['collection']}/items", json=first_item + ) + assert resp.status_code == 200 + + second_item = load_test_data("test_item.json") + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + second_item["properties"]["datetime"] = another_item_date.strftime(DATETIME_RFC339) + resp = app_client.post( + f"/collections/{second_item['collection']}/items", json=second_item + ) + assert resp.status_code == 200 + + params = { + "collections": [first_item["collection"]], + "sortby": [{"field": "datetime", "direction": "desc"}], + } + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == first_item["id"] + assert resp_json["features"][1]["id"] == second_item["id"] + + +def test_item_search_by_id_get(app_client, load_test_data): + """Test GET search by item id (core)""" + ids = ["test1", "test2", "test3"] + for id in ids: + test_item = load_test_data("test_item.json") + test_item["id"] = id + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = {"collections": test_item["collection"], "ids": ",".join(ids)} + resp = app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == len(ids) + assert set([feat["id"] for feat in resp_json["features"]]) == set(ids) + + +def test_item_search_bbox_get(app_client, load_test_data): + """Test GET search with spatial query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = { + "collections": test_item["collection"], + "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), + } + resp = app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_get_without_collections(app_client, load_test_data): + """Test GET search without 
specifying collections""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = { + "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), + } + resp = app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_temporal_window_get(app_client, load_test_data): + """Test GET search with spatio-temporal query (core)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + item_date = datetime.strptime(test_item["properties"]["datetime"], DATETIME_RFC339) + item_date_before = item_date - timedelta(seconds=1) + item_date_after = item_date + timedelta(seconds=1) + + params = { + "collections": test_item["collection"], + "bbox": ",".join([str(coord) for coord in test_item["bbox"]]), + "datetime": f"{item_date_before.strftime(DATETIME_RFC339)}/{item_date_after.strftime(DATETIME_RFC339)}", + } + resp = app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_sort_get(app_client, load_test_data): + """Test GET search with sorting (sort extension)""" + first_item = load_test_data("test_item.json") + item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + resp = app_client.post( + f"/collections/{first_item['collection']}/items", json=first_item + ) + assert resp.status_code == 200 + + second_item = load_test_data("test_item.json") + second_item["id"] = "another-item" + another_item_date = item_date - timedelta(days=1) + second_item["properties"]["datetime"] = another_item_date.strftime(DATETIME_RFC339) + resp = app_client.post( + f"/collections/{second_item['collection']}/items", json=second_item + ) + assert resp.status_code == 200 + params = {"collections": [first_item["collection"]], "sortby": "-datetime"} + resp = app_client.get("/search", params=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == first_item["id"] + assert resp_json["features"][1]["id"] == second_item["id"] + + +def test_item_search_post_without_collection(app_client, load_test_data): + """Test POST search without specifying a collection""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = { + "bbox": test_item["bbox"], + } + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == test_item["id"] + + +def test_item_search_properties_jsonb(app_client, load_test_data): + """Test POST search with JSONB query (query extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + # EPSG is a JSONB key + params = {"query": {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}}} + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +def test_item_search_properties_field(app_client, load_test_data): + """Test 
POST search indexed field with query (query extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + # Orientation is an indexed field + params = {"query": {"orientation": {"eq": "south"}}} + resp = app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert len(resp_json["features"]) == 0 + + +def test_item_search_get_query_extension(app_client, load_test_data): + """Test GET search with JSONB query (query extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + # EPSG is a JSONB key + params = { + "collections": [test_item["collection"]], + "query": json.dumps( + {"proj:epsg": {"gt": test_item["properties"]["proj:epsg"] + 1}} + ), + } + resp = app_client.get("/search", params=params) + assert resp.json()["context"]["returned"] == 0 + + params["query"] = json.dumps( + {"proj:epsg": {"eq": test_item["properties"]["proj:epsg"]}} + ) + resp = app_client.get("/search", params=params) + resp_json = resp.json() + assert resp_json["context"]["returned"] == 1 + assert ( + resp_json["features"][0]["properties"]["proj:epsg"] + == test_item["properties"]["proj:epsg"] + ) + + +def test_get_missing_item_collection(app_client): + """Test reading a collection which does not exist""" + resp = app_client.get("/collections/invalid-collection/items") + assert resp.status_code == 404 + + +def test_pagination_item_collection(app_client, load_test_data): + """Test item collection pagination links (paging extension)""" + test_item = load_test_data("test_item.json") + ids = [] + + # Ingest 5 items + for idx in range(5): + uid = str(uuid.uuid4()) + test_item["id"] = uid + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + ids.append(uid) + + # Paginate through all 5 items with a limit of 1 (expecting 5 requests) + page = app_client.get( + f"/collections/{test_item['collection']}/items", params={"limit": 1} + ) + idx = 0 + item_ids = [] + while True: + idx += 1 + page_data = page.json() + item_ids.append(page_data["features"][0]["id"]) + next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) + if not next_link: + break + query_params = parse_qs(urlparse(next_link[0]["href"]).query) + page = app_client.get( + f"/collections/{test_item['collection']}/items", params=query_params + ) + + # Our limit is 1 so we expect len(ids) number of requests before we run out of pages + assert idx == len(ids) + + # Confirm we have paginated through all items + assert not set(item_ids) - set(ids) + + +def test_pagination_post(app_client, load_test_data): + """Test POST pagination (paging extension)""" + test_item = load_test_data("test_item.json") + ids = [] + + # Ingest 5 items + for idx in range(5): + uid = str(uuid.uuid4()) + test_item["id"] = uid + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + ids.append(uid) + + # Paginate through all 5 items with a limit of 1 (expecting 5 requests) + request_body = {"ids": ids, "limit": 1} + page = app_client.post("/search", json=request_body) + idx = 0 + item_ids = [] + while True: + idx += 1 + page_data = page.json() + item_ids.append(page_data["features"][0]["id"]) + next_link = list(filter(lambda 
l: l["rel"] == "next", page_data["links"])) + if not next_link: + break + # Merge request bodies + request_body.update(next_link[0]["body"]) + page = app_client.post("/search", json=request_body) + + # Our limit is 1 so we expect len(ids) number of requests before we run out of pages + assert idx == len(ids) + + # Confirm we have paginated through all items + assert not set(item_ids) - set(ids) + + +def test_pagination_token_idempotent(app_client, load_test_data): + """Test that pagination tokens are idempotent (paging extension)""" + test_item = load_test_data("test_item.json") + ids = [] + + # Ingest 5 items + for idx in range(5): + uid = str(uuid.uuid4()) + test_item["id"] = uid + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + ids.append(uid) + + page = app_client.get("/search", params={"ids": ",".join(ids), "limit": 3}) + page_data = page.json() + next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) + + # Confirm token is idempotent + resp1 = app_client.get( + "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) + ) + resp2 = app_client.get( + "/search", params=parse_qs(urlparse(next_link[0]["href"]).query) + ) + resp1_data = resp1.json() + resp2_data = resp2.json() + + # Two different requests with the same pagination token should return the same items + assert [item["id"] for item in resp1_data["features"]] == [ + item["id"] for item in resp2_data["features"] + ] + + +def test_field_extension_get(app_client, load_test_data): + """Test GET search with included fields (fields extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + params = {"fields": "+properties.proj:epsg,+properties.eo:gsd"} + resp = app_client.get("/search", params=params) + feat_properties = resp.json()["features"][0]["properties"] + assert not set(feat_properties) - {"proj:epsg", "eo:gsd", "datetime"} + + +def test_field_extension_post(app_client, load_test_data): + """Test POST search with included and excluded fields (fields extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + body = { + "fields": { + "exclude": ["assets.B1"], + "include": ["properties.eo:cloud_cover", "properties.orientation"], + } + } + + resp = app_client.post("/search", json=body) + resp_json = resp.json() + assert "B1" not in resp_json["features"][0]["assets"].keys() + assert not set(resp_json["features"][0]["properties"]) - { + "orientation", + "eo:cloud_cover", + "datetime", + } + + +def test_field_extension_exclude_and_include(app_client, load_test_data): + """Test POST search including/excluding same field (fields extension)""" + test_item = load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + body = { + "fields": { + "exclude": ["properties.eo:cloud_cover"], + "include": ["properties.eo:cloud_cover"], + } + } + + resp = app_client.post("/search", json=body) + resp_json = resp.json() + assert "eo:cloud_cover" not in resp_json["features"][0]["properties"] + + +def test_field_extension_exclude_default_includes(app_client, load_test_data): + """Test POST search excluding a forbidden field (fields extension)""" + test_item = 
load_test_data("test_item.json") + resp = app_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 200 + + body = {"fields": {"exclude": ["geometry"],}} + + resp = app_client.post("/search", json=body) + resp_json = resp.json() + assert "geometry" in resp_json["features"][0] + + +def test_search_intersects_and_bbox(app_client): + """Test POST search intersects and bbox are mutually exclusive (core)""" + bbox = [-118, 34, -117, 35] + geoj = Polygon.from_bounds(*bbox).__geo_interface__ + params = {"bbox": bbox, "intersects": geoj} + resp = app_client.post("/search", json=params) + assert resp.status_code == 422 + + +def test_get_missing_item(app_client, load_test_data): + """Test read item which does not exist (transactions extension)""" + test_coll = load_test_data("test_collection.json") + resp = app_client.get(f"/collections/{test_coll['id']}/items/invalid-item") + assert resp.status_code == 404 + + +def test_create_item_database_error(load_test_data): + """Test 424 is raised on database error""" + test_item = load_test_data("test_item.json") + with create_test_client_with_error( + client=ItemCrudClient, + mocked_method="create", + dependency=item_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.post( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 424 + + +def test_read_item_database_error(load_test_data): + """Test 424 is raised on database error""" + test_item = load_test_data("test_item.json") + with create_test_client_with_error( + client=ItemCrudClient, + mocked_method="read", + dependency=item_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.get( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 424 + + +def test_update_item_database_error(load_test_data): + """Test 424 is raised on database error""" + test_item = load_test_data("test_item.json") + with create_test_client_with_error( + client=ItemCrudClient, + mocked_method="update", + dependency=item_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.put( + f"/collections/{test_item['collection']}/items", json=test_item + ) + assert resp.status_code == 424 + + +def test_delete_item_database_error(load_test_data): + """Test 424 is raised on database error""" + test_item = load_test_data("test_item.json") + with create_test_client_with_error( + client=ItemCrudClient, + mocked_method="delete", + dependency=item_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.delete( + f"/collections/{test_item['collection']}/items/{test_item['id']}" + ) + assert resp.status_code == 424 + + +def test_get_item_collection_database_error(load_test_data): + """Test 424 is raised on database error""" + test_collection = load_test_data("test_collection.json") + with create_test_client_with_error( + client=CollectionCrudClient, + mocked_method="get_item_collection", + dependency=collection_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.get(f"/collections/{test_collection['id']}/items") + assert resp.status_code == 424 + + +def test_item_search_database_error(load_test_data): + """Test 424 is raised on database error""" + test_item = load_test_data("test_item.json") + + params = { + "collections": [test_item["collection"]], + "ids": 
[test_item["id"]], + "sort": {"field": "datetime", "direction": "desc"}, + } + with create_test_client_with_error( + client=ItemCrudClient, + mocked_method="stac_search", + dependency=item_crud_client_factory, + error=DatabaseError(message="error"), + ) as test_client: + resp = test_client.post(f"/search", json=params) + assert resp.status_code == 424 diff --git a/tests/resources/test_mgmt.py b/tests/resources/test_mgmt.py new file mode 100644 index 000000000..0a11e38e8 --- /dev/null +++ b/tests/resources/test_mgmt.py @@ -0,0 +1,9 @@ +def test_ping_no_param(app_client): + """ + Test ping endpoint with a mocked client. + Args: + app_client (TestClient): mocked client fixture + """ + res = app_client.get("/_mgmt/ping") + assert res.status_code == 200 + assert res.json() == {"message": "PONG"}
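The database-error tests above all lean on the dependency-override pattern defined in tests/conftest.py (create_mock plus create_test_client_with_error): build a spec'd mock whose method raises, register it in app.dependency_overrides, and assert the API maps the failure to a 424 response. The following is a minimal self-contained sketch of that pattern, using only illustrative names (toy_app, ToyClient, toy_client_factory, /thing) rather than anything from this diff:

from unittest.mock import MagicMock, Mock

from fastapi import Depends, FastAPI, HTTPException
from starlette.testclient import TestClient

# All names below are illustrative stand-ins, not part of this pull request.
toy_app = FastAPI()


class ToyClient:
    def read(self) -> dict:
        return {"status": "ok"}


def toy_client_factory() -> ToyClient:
    return ToyClient()


@toy_app.get("/thing")
def read_thing(client: ToyClient = Depends(toy_client_factory)):
    try:
        return client.read()
    except RuntimeError:
        # Stand-in for the DatabaseError -> 424 mapping exercised by the tests.
        raise HTTPException(status_code=424, detail="database error")


def test_read_thing_database_error():
    # Same idea as create_mock(): a spec'd mock whose method raises.
    mock_client = MagicMock(ToyClient)
    mock_client.read = Mock(side_effect=RuntimeError("boom"))

    # Same idea as create_test_client_with_error(): swap the real dependency
    # for the mock, then clean up the override afterwards.
    toy_app.dependency_overrides[toy_client_factory] = lambda: mock_client
    try:
        with TestClient(toy_app) as test_client:
            assert test_client.get("/thing").status_code == 424
    finally:
        toy_app.dependency_overrides = {}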