diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index 8af0ce8d..a20de2dd 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -18,8 +18,95 @@ concurrency: cancel-in-progress: true jobs: - build: - + online_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/unit/ -x -m online -c /dev/null -p no:warnings -n 0 -v + offline_unit_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . 
+ - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/unit/ -x -m offline -c /dev/null -p no:warnings -n 0 -v + online_functional_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/functional/ -x -m online -c /dev/null -p no:warnings -n 0 -v + offline_functional_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies + run: | + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . 
+ - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/functional/ -x -m offline -c /dev/null -p no:warnings -n 0 -v + other_unit_tests: runs-on: ubuntu-latest timeout-minutes: 40 steps: @@ -40,9 +127,26 @@ jobs: env: SIMVUE_URL: ${{ secrets.SIMVUE_URL }} SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} - run: python -m pytest tests/ -x --cov --cov-report=xml -m 'not scenario' -c /dev/null -p no:warnings -n 0 -v - - name: Upload coverage reports to Codecov + run: python -m pytest tests/unit/ -x -m 'not offline and not online and not scenario' -c /dev/null -p no:warnings -n 0 -v + other_functional_tests: + runs-on: ubuntu-latest + timeout-minutes: 40 + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" + - name: Install dependencies run: | - curl -Os https://uploader.codecov.io/latest/linux/codecov - chmod +x codecov - ./codecov -t ${CODECOV_TOKEN} + python -m pip install poetry + poetry self add poetry-plugin-export + poetry export -f requirements.txt --with dev -o requirements.txt --all-extras + python -m pip install torch --index-url https://download.pytorch.org/whl/cpu + python -m pip install -r requirements.txt + python -m pip install . + - name: Test with pytest + env: + SIMVUE_URL: ${{ secrets.SIMVUE_URL }} + SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} + run: python -m pytest tests/functional/ -x -m 'not offline and not online and not scenario' -c /dev/null -p no:warnings -n 0 -v diff --git a/CHANGELOG.md b/CHANGELOG.md index fcffc1f4..84903f30 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Change log - ## Unreleased +* Removed CodeCarbon dependence in favour of a slimmer solution using the CO2 Signal API. * Added sorting to server queries, users can now specify to sort by columns during data retrieval from the database.
## [v2.0.1](https://github.com/simvue-io/client/releases/tag/v2.0.1) - 2025-03-24 * Improvements to docstrings on methods, classes and functions. diff --git a/CITATION.cff b/CITATION.cff index f8a502ec..27cc4c8d 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -42,67 +42,5 @@ keywords: - alerting - simulation license: Apache-2.0 -commit: 5084f9c17ae4b7f77e188f5aa8a51ebd3f790c9e version: 2.0.1 date-released: '2025-03-24' -references: -- title: mlco2/codecarbon - version: v2.8.2 - type: software - date-released: 2024-12-08 - doi: 10.5281/zenodo.14518377 - url: https://doi.org/10.5281/zenodo.14518377 - repository-code: https://github.com/mlco2/codecarbon - authors: - - given-names: Benoit - family-names: Courty - - given-names: Victor - family-names: Schmidt - - given-names: Sasha - family-names: Luccioni - - given-names: Goyal - family-names: Kamal - - given-names: Marion - family-names: Coutarel - - given-names: Boris - family-names: Feld - - given-names: Jérémy - family-names: Lecourt - - given-names: Liam - family-names: Connell - - given-names: Amine - family-names: Saboni - - given-names: Mathilde - family-names: Léval - - given-names: Luis - family-names: Blanche - - given-names: Alexis - family-names: Cruveiller - - given-names: Franklin - family-names: Zhao - - given-names: Aditya - family-names: Joshi - - given-names: Alexis - family-names: Bogroff - - given-names: Hugues - family-names: de Lavoreille - - given-names: Niko - family-names: Laskaris - - given-names: Edoardo - family-names: Abati - - given-names: Douglas - family-names: Blank - - given-names: Ziyao - family-names: Wang - - given-names: Armin - family-names: Catovic - - given-names: Marc - family-names: Alencon - - given-names: Michał - family-names: Stęchły - - given-names: Christian - family-names: Bauer - - given-names: Lucas Otávio N. 
- family-names: de Araújo - - given-names: Minerva - family-names: Books diff --git a/poetry.lock b/poetry.lock index b5bf687b..57124a2b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,51 +13,6 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] -[[package]] -name = "anyio" -version = "4.9.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, - {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "arrow" -version = "1.3.0" -description = "Better dates & times for Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, -] - -[package.dependencies] -python-dateutil = ">=2.7.0" 
-types-python-dateutil = ">=2.8.10" - -[package.extras] -doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] - [[package]] name = "attrs" version = "25.3.0" @@ -92,87 +47,6 @@ files = [ {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= \"3.12\" and platform_python_implementation != \"PyPy\"" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash 
= "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[package.dependencies] -pycparser = "*" - [[package]] name = "charset-normalizer" version = "3.4.1" @@ -292,37 +166,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "codecarbon" -version = "2.8.3" -description = "" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "codecarbon-2.8.3-py3-none-any.whl", hash = "sha256:d3204852ad0c83d94d0f16b7d922e7f540c1e5f488d911f3e75408fe29f4ef4c"}, - {file = "codecarbon-2.8.3.tar.gz", hash = "sha256:037dd5afa1c5f60154f893ecd1631e0c849786edcfc9ff34a7ef467707891269"}, -] - 
-[package.dependencies] -arrow = "*" -click = "*" -fief-client = {version = "*", extras = ["cli"]} -pandas = "*" -prometheus-client = "*" -psutil = "*" -py-cpuinfo = "*" -pynvml = "*" -questionary = "*" -rapidfuzz = "*" -requests = "*" -rich = "*" -typer = "*" - -[package.extras] -viz = ["dash", "dash-bootstrap-components (<1.0.0)", "fire"] - [[package]] name = "colorama" version = "0.4.6" @@ -491,65 +334,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] -[[package]] -name = "cryptography" -version = "44.0.2" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, - 
{file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, - {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, - {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, - {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, - {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, - {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, - {file = 
"cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, - {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] -docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] -sdist = ["build (>=1.0.0)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] -test-randomorder = ["pytest-randomly"] - [[package]] name = "cycler" version = "0.12.1" @@ -567,6 +351,19 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "decorator" +version = "5.2.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + [[package]] name = "deepmerge" version = "2.0" @@ -628,7 +425,7 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" 
-groups = ["main", "dev"] +groups = ["dev"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, @@ -654,29 +451,6 @@ files = [ [package.extras] testing = ["hatch", "pre-commit", "pytest", "tox"] -[[package]] -name = "fief-client" -version = "0.20.0" -description = "Fief Client for Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "fief_client-0.20.0-py3-none-any.whl", hash = "sha256:425f40cc7c45c651daec63da402e033c53d91dcaa3f9bf208873fd8692fc16dc"}, - {file = "fief_client-0.20.0.tar.gz", hash = "sha256:dbfb906d03c4a5402ceac5c843aa4708535fb6f5d5c1c4e263ec06fbbbc434d7"}, -] - -[package.dependencies] -httpx = ">=0.21.3,<0.28.0" -jwcrypto = ">=1.4,<2.0.0" -yaspin = {version = "*", optional = true, markers = "extra == \"cli\""} - -[package.extras] -cli = ["yaspin"] -fastapi = ["fastapi", "makefun (>=1.14.0,<2.0.0)"] -flask = ["flask"] - [[package]] name = "fire" version = "0.7.0" @@ -780,103 +554,73 @@ unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] -name = "gitdb" -version = "4.0.12" -description = "Git Object Database" +name = "future" +version = "1.0.0" +description = "Clean single-source support for Python 3 and 2" optional = false -python-versions = ">=3.7" +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, - {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, + {file = "future-1.0.0-py3-none-any.whl", hash = 
"sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] -[package.dependencies] -smmap = ">=3.0.1,<6" - [[package]] -name = "gitpython" -version = "3.1.44" -description = "GitPython is a Python library used to interact with Git repositories" +name = "geocoder" +version = "1.38.1" +description = "Geocoder is a simple and consistent geocoding library." optional = false -python-versions = ">=3.7" +python-versions = "*" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, - {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, + {file = "geocoder-1.38.1-py2.py3-none-any.whl", hash = "sha256:a733e1dfbce3f4e1a526cac03aadcedb8ed1239cf55bd7f3a23c60075121a834"}, + {file = "geocoder-1.38.1.tar.gz", hash = "sha256:c9925374c961577d0aee403b09e6f8ea1971d913f011f00ca70c76beaf7a77e7"}, ] [package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +click = "*" +future = "*" +ratelim = "*" +requests = "*" +six = "*" [[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +name = "gitdb" +version = "4.0.12" +description = "Git Object Database" optional = false python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"}, + {file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"}, ] [package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] +smmap = ">=3.0.1,<6" [[package]] -name = "httpx" -version = "0.27.2" -description = "The next generation HTTP client." 
+name = "gitpython" +version = "3.1.44" +description = "GitPython is a Python library used to interact with Git repositories" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, + {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, + {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, ] [package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" +gitdb = ">=4.0.1,<5" [package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] +doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] [[package]] name = "humanfriendly" @@ -969,23 +713,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jwcrypto" -version = "1.5.6" -description = "Implementation of JOSE Web standards" -optional = false -python-versions = ">= 3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, - {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, -] 
- -[package.dependencies] -cryptography = ">=3.4" -typing-extensions = ">=4.5.0" - [[package]] name = "kiwisolver" version = "1.4.8" @@ -1077,32 +804,6 @@ files = [ {file = "kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e"}, ] -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - [[package]] name = "markupsafe" version = "3.0.2" @@ -1234,19 +935,6 @@ python-dateutil = ">=2.7" [package.extras] dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - [[package]] name = "msgpack" version = "1.1.0" @@ -1413,19 +1101,6 @@ files = [ {file = "numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f"}, ] -[[package]] -name = "nvidia-ml-py" -version = "12.570.86" -description = "Python Bindings for the NVIDIA Management Library" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "nvidia_ml_py-12.570.86-py3-none-any.whl", hash = "sha256:58907de35a845abd13dcb227f18298f3b5dd94a72d04c9e594e77711e95c0b51"}, - {file = "nvidia_ml_py-12.570.86.tar.gz", hash = "sha256:0508d4a0c7b6d015cf574530b95a62ed4fc89da3b8b47e1aefe6777db170ec8b"}, -] - [[package]] name = "packaging" version = "24.2" @@ -1654,38 +1329,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "prometheus-client" -version = "0.21.1" -description = "Python client for the Prometheus monitoring system." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, - {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.50" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.8.0" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, - {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, -] - -[package.dependencies] -wcwidth = "*" - [[package]] name = "psutil" version = "6.1.1" @@ -1731,32 +1374,6 @@ files = [ {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -description = "Get CPU info with pure Python" -optional = false -python-versions = "*" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, - {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" and platform_python_implementation != \"PyPy\" or python_version >= 
\"3.12\" and platform_python_implementation != \"PyPy\"" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - [[package]] name = "pydantic" version = "2.10.6" @@ -1893,22 +1510,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pygments" -version = "2.19.1" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - [[package]] name = "pyjwt" version = "2.10.1" @@ -1928,25 +1529,6 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] -[[package]] -name = "pynvml" -version = "12.0.0" -description = "Python utilities for the NVIDIA Management Library" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "pynvml-12.0.0-py3-none-any.whl", hash = "sha256:fdff84b62a27dbe98e08e1a647eb77342bef1aebe0878bcd15e99a83fcbecb9e"}, - {file = "pynvml-12.0.0.tar.gz", hash = "sha256:299ce2451a6a17e6822d6faee750103e25b415f06f59abb8db65d30f794166f5"}, -] - -[package.dependencies] -nvidia-ml-py = ">=12.0.0,<13.0.0a0" - -[package.extras] -test = ["pytest (>=3.6)", "pytest-cov", "pytest-runner"] - [[package]] 
name = "pyparsing" version = "3.2.3" @@ -2079,6 +1661,22 @@ termcolor = ">=2.1.0" [package.extras] dev = ["black", "flake8", "pre-commit"] +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + [[package]] name = "pytest-xdist" version = "3.6.1" @@ -2130,22 +1728,6 @@ files = [ {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] -[[package]] -name = "questionary" -version = "2.1.0" -description = "Python library to build pretty command line user prompts ⭐️" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "questionary-2.1.0-py3-none-any.whl", hash = "sha256:44174d237b68bc828e4878c763a9ad6790ee61990e0ae72927694ead57bab8ec"}, - {file = "questionary-2.1.0.tar.gz", hash = "sha256:6302cdd645b19667d8f6e6634774e9538bfcd1aad9be287e743d96cacaf95587"}, -] - -[package.dependencies] -prompt_toolkit = ">=2.0,<4.0" - [[package]] name = "randomname" version = "0.2.1" @@ -2162,112 +1744,20 @@ files = [ fire = "*" [[package]] -name = "rapidfuzz" -version = "3.12.2" -description = "rapid fuzzy string matching" +name = "ratelim" +version = "0.1.6" +description = "Makes it easy to respect rate limits." 
optional = false -python-versions = ">=3.9" +python-versions = "*" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b9a75e0385a861178adf59e86d6616cbd0d5adca7228dc9eeabf6f62cf5b0b1"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6906a7eb458731e3dd2495af1d0410e23a21a2a2b7ced535e6d5cd15cb69afc5"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4b3334a8958b689f292d5ce8a928140ac98919b51e084f04bf0c14276e4c6ba"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85a54ce30345cff2c79cbcffa063f270ad1daedd0d0c3ff6e541d3c3ba4288cf"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb63c5072c08058f8995404201a52fc4e1ecac105548a4d03c6c6934bda45a3"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5385398d390c6571f0f2a7837e6ddde0c8b912dac096dc8c87208ce9aaaa7570"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5032cbffa245b4beba0067f8ed17392ef2501b346ae3c1f1d14b950edf4b6115"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:195adbb384d89d6c55e2fd71e7fb262010f3196e459aa2f3f45f31dd7185fe72"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f43b773a4d4950606fb25568ecde5f25280daf8f97b87eb323e16ecd8177b328"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:55a43be0e0fa956a919043c19d19bd988991d15c59f179d413fe5145ed9deb43"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:71cf1ea16acdebe9e2fb62ee7a77f8f70e877bebcbb33b34e660af2eb6d341d9"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:a3692d4ab36d44685f61326dca539975a4eda49b2a76f0a3df177d8a2c0de9d2"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win32.whl", hash = "sha256:09227bd402caa4397ba1d6e239deea635703b042dd266a4092548661fb22b9c6"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:0f05b7b95f9f87254b53fa92048367a8232c26cee7fc8665e4337268c3919def"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win_arm64.whl", hash = "sha256:6938738e00d9eb6e04097b3f565097e20b0c398f9c58959a2bc64f7f6be3d9da"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9c4d984621ae17404c58f8d06ed8b025e167e52c0e6a511dfec83c37e9220cd"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f9132c55d330f0a1d34ce6730a76805323a6250d97468a1ca766a883d6a9a25"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b343b6cb4b2c3dbc8d2d4c5ee915b6088e3b144ddf8305a57eaab16cf9fc74"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24081077b571ec4ee6d5d7ea0e49bc6830bf05b50c1005028523b9cd356209f3"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c988a4fc91856260355773bf9d32bebab2083d4c6df33fafeddf4330e5ae9139"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:780b4469ee21cf62b1b2e8ada042941fd2525e45d5fb6a6901a9798a0e41153c"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd84b0a323885493c893bad16098c5e3b3005d7caa995ae653da07373665d97"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efa22059c765b3d8778083805b199deaaf643db070f65426f87d274565ddf36a"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:095776b11bb45daf7c2973dd61cc472d7ea7f2eecfa454aef940b4675659b92f"}, - {file = 
"rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7e2574cf4aa86065600b664a1ac7b8b8499107d102ecde836aaaa403fc4f1784"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d5a3425a6c50fd8fbd991d8f085ddb504791dae6ef9cc3ab299fea2cb5374bef"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:97fb05e1ddb7b71a054040af588b0634214ee87cea87900d309fafc16fd272a4"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win32.whl", hash = "sha256:b4c5a0413589aef936892fbfa94b7ff6f7dd09edf19b5a7b83896cc9d4e8c184"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:58d9ae5cf9246d102db2a2558b67fe7e73c533e5d769099747921232d88b9be2"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:7635fe34246cd241c8e35eb83084e978b01b83d5ef7e5bf72a704c637f270017"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1d982a651253ffe8434d9934ff0c1089111d60502228464721a2a4587435e159"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02e6466caa0222d5233b1f05640873671cd99549a5c5ba4c29151634a1e56080"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e956b3f053e474abae69ac693a52742109d860ac2375fe88e9387d3277f4c96c"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dee7d740a2d5418d4f964f39ab8d89923e6b945850db833e798a1969b19542a"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a057cdb0401e42c84b6516c9b1635f7aedd5e430c6e388bd5f6bcd1d6a0686bb"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dccf8d4fb5b86d39c581a59463c596b1d09df976da26ff04ae219604223d502f"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:21d5b3793c6f5aecca595cd24164bf9d3c559e315ec684f912146fc4e769e367"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:46a616c0e13cff2de1761b011e0b14bb73b110182f009223f1453d505c9a975c"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19fa5bc4301a1ee55400d4a38a8ecf9522b0391fc31e6da5f4d68513fe5c0026"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:544a47190a0d25971658a9365dba7095397b4ce3e897f7dd0a77ca2cf6fa984e"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f21af27c5e001f0ba1b88c36a0936437dfe034c452548d998891c21125eb640f"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b63170d9db00629b5b3f2862114d8d6ee19127eaba0eee43762d62a25817dbe0"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win32.whl", hash = "sha256:6c7152d77b2eb6bfac7baa11f2a9c45fd5a2d848dbb310acd0953b3b789d95c9"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:1a314d170ee272ac87579f25a6cf8d16a031e1f7a7b07663434b41a1473bc501"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:d41e8231326e94fd07c4d8f424f6bed08fead6f5e6688d1e6e787f1443ae7631"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941f31038dba5d3dedcfcceba81d61570ad457c873a24ceb13f4f44fcb574260"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fe2dfc454ee51ba168a67b1e92b72aad251e45a074972cef13340bbad2fd9438"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78fafaf7f5a48ee35ccd7928339080a0136e27cf97396de45259eca1d331b714"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0c7989ff32c077bb8fd53253fd6ca569d1bfebc80b17557e60750e6909ba4fe"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:96fa00bc105caa34b6cd93dca14a29243a3a7f0c336e4dcd36348d38511e15ac"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bccfb30c668620c5bc3490f2dc7d7da1cca0ead5a9da8b755e2e02e2ef0dff14"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9b0adc3d894beb51f5022f64717b6114a6fabaca83d77e93ac7675911c8cc5"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32691aa59577f42864d5535cb6225d0f47e2c7bff59cf4556e5171e96af68cc1"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:758b10380ad34c1f51753a070d7bb278001b5e6fcf544121c6df93170952d705"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:50a9c54c0147b468363119132d514c5024fbad1ed8af12bd8bd411b0119f9208"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e3ceb87c11d2d0fbe8559bb795b0c0604b84cfc8bb7b8720b5c16e9e31e00f41"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f7c9a003002434889255ff5676ca0f8934a478065ab5e702f75dc42639505bba"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win32.whl", hash = "sha256:cf165a76870cd875567941cf861dfd361a0a6e6a56b936c5d30042ddc9def090"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:55bcc003541f5f16ec0a73bf6de758161973f9e8d75161954380738dd147f9f2"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:69f6ecdf1452139f2b947d0c169a605de578efdb72cbb2373cb0a94edca1fd34"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c852cd8bed1516a64fd6e2d4c6f270d4356196ee03fda2af1e5a9e13c34643"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42e7f747b55529a6d0d1588695d71025e884ab48664dca54b840413dea4588d8"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a749fd2690f24ef256b264a781487746bbb95344364fe8fe356f0eef7ef206ba"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a11e1d036170bbafa43a9e63d8c309273564ec5bdfc5439062f439d1a16965a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb337f1832c1231e3d5621bd0ebebb854e46036aedae3e6a49c1fc08f16f249"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e88c6e68fca301722fa3ab7fd3ca46998012c14ada577bc1e2c2fc04f2067ca6"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e1a3a8b4b5125cfb63a6990459b25b87ea769bdaf90d05bb143f8febef076a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9f8177b24ccc0a843e85932b1088c5e467a7dd7a181c13f84c684b796bea815"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6c506bdc2f304051592c0d3b0e82eed309248ec10cdf802f13220251358375ea"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:30bf15c1ecec2798b713d551df17f23401a3e3653ad9ed4e83ad1c2b06e86100"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:bd9a67cfc83e8453ef17ddd1c2c4ce4a74d448a197764efb54c29f29fb41f611"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7a6eaec2ef658dd650c6eb9b36dff7a361ebd7d8bea990ce9d639b911673b2cb"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win32.whl", hash = "sha256:d7701769f110332cde45c41759cb2a497de8d2dca55e4c519a46aed5fbb19d1a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:296bf0fd4f678488670e262c87a3e4f91900b942d73ae38caa42a417e53643b1"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win_arm64.whl", hash = "sha256:7957f5d768de14f6b2715303ccdf224b78416738ee95a028a2965c95f73afbfb"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:e5fd3ce849b27d063755829cda27a9dab6dbd63be3801f2a40c60ec563a4c90f"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:54e53662d71ed660c83c5109127c8e30b9e607884b7c45d2aff7929bbbd00589"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b9e43cf2213e524f3309d329f1ad8dbf658db004ed44f6ae1cd2919aa997da5"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29ca445e320e5a8df3bd1d75b4fa4ecfa7c681942b9ac65b55168070a1a1960e"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83eb7ef732c2f8533c6b5fbe69858a722c218acc3e1fc190ab6924a8af7e7e0e"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:648adc2dd2cf873efc23befcc6e75754e204a409dfa77efd0fea30d08f22ef9d"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b1e6f48e1ffa0749261ee23a1c6462bdd0be5eac83093f4711de17a42ae78ad"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ae9ded463f2ca4ba1eb762913c5f14c23d2e120739a62b7f4cc102eab32dc90"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dda45f47b559be72ecbce45c7f71dc7c97b9772630ab0f3286d97d2c3025ab71"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3745c6443890265513a3c8777f2de4cb897aeb906a406f97741019be8ad5bcc"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36d3ef4f047ed1bc96fa29289f9e67a637ddca5e4f4d3dc7cb7f50eb33ec1664"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:54bb69ebe5ca0bd7527357e348f16a4c0c52fe0c2fcc8a041010467dcb8385f7"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:3f2ddd5b99b254039a8c82be5749d4d75943f62eb2c2918acf6ffd586852834f"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8117dab9b26a1aaffab59b4e30f80ac4d55e61ad4139a637c149365960933bee"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40c0f16d62d6553527de3dab2fb69709c4383430ea44bce8fb4711ed4cbc6ae3"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f177e1eb6e4f5261a89c475e21bce7a99064a8f217d2336fb897408f46f0ceaf"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df0cecc2852fcb078ed1b4482fac4fc2c2e7787f3edda8920d9a4c0f51b1c95"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b3c4df0321df6f8f0b61afbaa2ced9622750ee1e619128db57a18533d139820"}, - {file = "rapidfuzz-3.12.2.tar.gz", hash = "sha256:b0ba1ccc22fff782e7152a3d3d0caca44ec4e32dc48ba01c560b8593965b5aa3"}, + {file = "ratelim-0.1.6-py2.py3-none-any.whl", hash = "sha256:e1a7dd39e6b552b7cc7f52169cd66cdb826a1a30198e355d7016012987c9ad08"}, + {file = "ratelim-0.1.6.tar.gz", hash = "sha256:826d32177e11f9a12831901c9fda6679fd5bbea3605910820167088f5acbb11d"}, ] -[package.extras] -all = ["numpy"] +[package.dependencies] +decorator = "*" [[package]] name = "requests" @@ -2292,27 +1782,6 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "rich" -version = "13.9.4" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.8.0" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = 
"sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - [[package]] name = "ruff" version = "0.9.10" @@ -2377,19 +1846,6 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - [[package]] name = "six" version = "1.17.0" @@ -2416,19 +1872,6 @@ files = [ {file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"}, ] -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = 
"sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - [[package]] name = "tabulate" version = "0.9.0" @@ -2534,38 +1977,6 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -[[package]] -name = "typer" -version = "0.15.2" -description = "Typer, build great CLIs. Easy to code. Based on Python type hints." -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc"}, - {file = "typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5"}, -] - -[package.dependencies] -click = ">=8.0.0" -rich = ">=10.11.0" -shellingham = ">=1.3.0" -typing-extensions = ">=3.7.4.3" - -[[package]] -name = "types-python-dateutil" -version = "2.9.0.20241206" -description = "Typing stubs for python-dateutil" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, - {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, -] - [[package]] name = "types-requests" version = "2.32.0.20250306" @@ -2627,39 +2038,10 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "wcwidth" -version = "0.2.13" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -groups = ["main"] -markers = 
"python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "yaspin" -version = "3.0.1" -description = "Yet Another Terminal Spinner" -optional = false -python-versions = ">=3.9,<4.0" -groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" -files = [ - {file = "yaspin-3.0.1-py3-none-any.whl", hash = "sha256:c4b5d2ca23ae664b87a5cd53401c5107cef12668a71d9ee5ea5536045f364121"}, - {file = "yaspin-3.0.1.tar.gz", hash = "sha256:9c04aa69cce9be83e1ea3134a6712e749e6c0c9cd02599023713e6befd7bf369"}, -] - -[package.dependencies] -termcolor = ">=2.3,<3.0" - [extras] plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "fe341bb564b08a5b01370b04ed1d5564476a880713239f4fb37cb095a01491cc" +content-hash = "ce123cae51b9203f8b8838b0c4a1b60645ef4af7498ec5eb8ca4f84584f10203" diff --git a/pyproject.toml b/pyproject.toml index d2bbd621..ad5fab7c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,6 @@ dependencies = [ "gitpython (>=3.1.44,<4.0.0)", "humanfriendly (>=10.0,<11.0)", "randomname (>=0.2.1,<0.3.0)", - "codecarbon (>=2.8.3,<3.0.0)", "numpy (>=2.0.0,<3.0.0)", "flatdict (>=4.0.1,<5.0.0)", "semver (>=3.0.4,<4.0.0)", @@ -54,6 +53,7 @@ dependencies = [ "tenacity (>=9.0.0,<10.0.0)", "typing-extensions (>=4.12.2,<5.0.0) ; python_version < \"3.11\"", "deepmerge (>=2.0,<3.0)", + "geocoder (>=1.38.1,<2.0.0)", ] [project.urls] @@ -78,6 +78,7 @@ pytest-xdist = "^3.6.1" jinja2 = "^3.1.6" types-requests = "^2.32.0.20241016" interrogate = "^1.7.0" +pytest-timeout = "^2.3.1" [build-system] requires = ["poetry-core"] @@ -94,7 +95,7 @@ testpaths = [ "tests" ] markers = [ - "codecarbon: tests for emission metrics", + 
"eco: tests for emission metrics", "client: tests of Simvue client", "converters: tests for Simvue object converters", "dispatch: test data dispatcher", diff --git a/simvue/api/objects/folder.py b/simvue/api/objects/folder.py index 178d9c1a..6a8fc9aa 100644 --- a/simvue/api/objects/folder.py +++ b/simvue/api/objects/folder.py @@ -10,8 +10,8 @@ import pathlib import typing import datetime +import json -from codecarbon.output_methods.emissions_data import json import pydantic from simvue.exception import ObjectNotFoundError diff --git a/simvue/api/request.py b/simvue/api/request.py index 8dd6a8bd..0235fc20 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -10,9 +10,9 @@ import copy import json as json_module import typing +import logging import http -from codecarbon.external.logger import logging import requests from tenacity import ( retry, @@ -96,7 +96,6 @@ def post( else: data_sent = data - logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( url, headers=headers, diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 9e0b38bc..e4a5b526 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -7,6 +7,7 @@ """ import logging +import re import time import pydantic import typing @@ -48,12 +49,19 @@ def check_token(cls, v: typing.Any) -> str | None: class OfflineSpecifications(pydantic.BaseModel): cache: pathlib.Path | None = None - country_iso_code: str | None = None + + @pydantic.field_validator("cache") + @classmethod + def check_valid_cache_path(cls, cache: pathlib.Path) -> pathlib.Path: + if not re.fullmatch( + r"^(\/|([a-zA-Z]:\\))?([\w\s.-]+[\\/])*[\w\s.-]*$", f"{cache}" + ): + raise AssertionError(f"Value '{cache}' is not a valid cache path.") + return cache class MetricsSpecifications(pydantic.BaseModel): - resources_metrics_interval: pydantic.PositiveInt | None = -1 - emission_metrics_interval: pydantic.PositiveInt | None = None + system_metrics_interval: pydantic.PositiveInt | 
None = -1 enable_emission_metrics: bool = False diff --git a/simvue/config/user.py b/simvue/config/user.py index 3de51f6f..7b0c0631 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -33,6 +33,7 @@ from simvue.version import __version__ from simvue.api.request import get as sv_get from simvue.api.url import URL +from simvue.eco.config import EcoConfig logger = logging.getLogger(__name__) @@ -42,7 +43,9 @@ class SimvueConfiguration(pydantic.BaseModel): # Hide values as they contain token and URL - model_config = pydantic.ConfigDict(hide_input_in_errors=True) + model_config = pydantic.ConfigDict( + hide_input_in_errors=True, revalidate_instances="always" + ) client: ClientGeneralOptions = ClientGeneralOptions() server: ServerSpecifications = pydantic.Field( ..., description="Specifications for Simvue server" @@ -50,6 +53,7 @@ class SimvueConfiguration(pydantic.BaseModel): run: DefaultRunSpecifications = DefaultRunSpecifications() offline: OfflineSpecifications = OfflineSpecifications() metrics: MetricsSpecifications = MetricsSpecifications() + eco: EcoConfig = EcoConfig() @classmethod def _load_pyproject_configs(cls) -> dict | None: @@ -135,7 +139,7 @@ def write(self, out_directory: pydantic.DirectoryPath) -> None: @pydantic.model_validator(mode="after") @classmethod - def check_valid_server(cls, values: "SimvueConfiguration") -> bool: + def check_valid_server(cls, values: "SimvueConfiguration") -> "SimvueConfiguration": if os.environ.get("SIMVUE_NO_SERVER_CHECK"): return values diff --git a/simvue/eco.py b/simvue/eco.py deleted file mode 100644 index 76e2d694..00000000 --- a/simvue/eco.py +++ /dev/null @@ -1,132 +0,0 @@ -import typing -import logging -import datetime - -from codecarbon import EmissionsTracker, OfflineEmissionsTracker -from codecarbon.output import BaseOutput as cc_BaseOutput -from simvue.utilities import simvue_timestamp - -if typing.TYPE_CHECKING: - from simvue import Run - from codecarbon.output_methods.emissions_data import 
EmissionsData - - -logger = logging.getLogger(__file__) - - -class CodeCarbonOutput(cc_BaseOutput): - def __init__(self, run: "Run") -> None: - self._simvue_run = run - self._metrics_step: int = 0 - - def out( - self, total: "EmissionsData", delta: "EmissionsData", meta_update: bool = True - ) -> None: - # Check if the run has been shutdown, if so do nothing - if ( - self._simvue_run._shutdown_event - and self._simvue_run._shutdown_event.is_set() - ): - logger.debug("Terminating CodeCarbon tracker") - return - - if meta_update: - logger.debug("Logging CodeCarbon metadata") - try: - self._simvue_run.update_metadata( - { - "sustainability": { - "country": total.country_name, - "country_iso_code": total.country_iso_code, - "region": total.region, - "codecarbon_version": total.codecarbon_version, - } - } - ) - except AttributeError as e: - logger.error(f"Failed to update metadata: {e}") - try: - _cc_timestamp = datetime.datetime.strptime( - total.timestamp, "%Y-%m-%dT%H:%M:%S" - ) - except ValueError as e: - logger.error(f"Error parsing timestamp: {e}") - return - - logger.debug("Logging CodeCarbon metrics") - try: - self._simvue_run.log_metrics( - metrics={ - "sustainability.emissions.total": total.emissions, - "sustainability.energy_consumed.total": total.energy_consumed, - "sustainability.emissions.delta": delta.emissions, - "sustainability.energy_consumed.delta": delta.energy_consumed, - }, - step=self._metrics_step, - timestamp=simvue_timestamp(_cc_timestamp), - ) - except ArithmeticError as e: - logger.error(f"Failed to log metrics: {e}") - return - - self._metrics_step += 1 - - def live_out(self, total: "EmissionsData", delta: "EmissionsData") -> None: - self.out(total, delta, meta_update=False) - - -class SimvueEmissionsTracker(EmissionsTracker): - def __init__( - self, project_name: str, simvue_run: "Run", metrics_interval: int - ) -> None: - self._simvue_run = simvue_run - logger.setLevel(logging.ERROR) - super().__init__( - project_name=project_name, - 
measure_power_secs=metrics_interval, - api_call_interval=1, - experiment_id=None, - experiment_name=None, - logging_logger=CodeCarbonOutput(simvue_run), - save_to_logger=True, - allow_multiple_runs=True, - log_level="error", - ) - - def set_measure_interval(self, interval: int) -> None: - """Set the measure interval""" - self._set_from_conf(interval, "measure_power_secs") - - def post_init(self) -> None: - self._set_from_conf(self._simvue_run._id, "experiment_id") - self._set_from_conf(self._simvue_run._name, "experiment_name") - self.start() - - -class OfflineSimvueEmissionsTracker(OfflineEmissionsTracker): - def __init__( - self, project_name: str, simvue_run: "Run", metrics_interval: int - ) -> None: - self._simvue_run = simvue_run - logger.setLevel(logging.ERROR) - super().__init__( - country_iso_code=simvue_run._user_config.offline.country_iso_code, - project_name=project_name, - measure_power_secs=metrics_interval, - api_call_interval=1, - experiment_id=None, - experiment_name=None, - logging_logger=CodeCarbonOutput(simvue_run), - save_to_logger=True, - allow_multiple_runs=True, - log_level="error", - ) - - def set_measure_interval(self, interval: int) -> None: - """Set the measure interval""" - self._set_from_conf(interval, "measure_power_secs") - - def post_init(self) -> None: - self._set_from_conf(self._simvue_run._id, "experiment_id") - self._set_from_conf(self._simvue_run._name, "experiment_name") - self.start() diff --git a/simvue/eco/__init__.py b/simvue/eco/__init__.py new file mode 100644 index 00000000..240c0c06 --- /dev/null +++ b/simvue/eco/__init__.py @@ -0,0 +1,15 @@ +""" +Simvue Eco +========== + +Contains functionality for green IT, monitoring emissions etc. +NOTE: The metrics calculated by these methods should be used for relative +comparisons only. Any values returned should not be taken as absolute. 
+ +""" + +__date__ = "2025-03-06" + +from .emissions_monitor import CO2Monitor as CO2Monitor + +__all__ = ["CO2Monitor"] diff --git a/simvue/eco/api_client.py b/simvue/eco/api_client.py new file mode 100644 index 00000000..e4597ee6 --- /dev/null +++ b/simvue/eco/api_client.py @@ -0,0 +1,148 @@ +""" +CO2 Signal API Client +===================== + +Provides inteface to the CO2 Signal API, +which provides real-time data on the carbon intensity of +electricity generation in different countries. +""" + +__date__ = "2025-02-27" + +import requests +import pydantic +import functools +import http +import logging +import datetime +import geocoder +import geocoder.location +import typing + +CO2_SIGNAL_API_ENDPOINT: str = "https://api.co2signal.com/v1/latest" + + +class CO2SignalData(pydantic.BaseModel): + datetime: datetime.datetime + carbon_intensity: float + fossil_fuel_percentage: float + + +class CO2SignalResponse(pydantic.BaseModel): + disclaimer: str + country_code: str + status: str + data: CO2SignalData + carbon_intensity_units: str + + @classmethod + def from_json_response(cls, json_response: dict) -> "CO2SignalResponse": + _data: dict[str, typing.Any] = json_response["data"] + _co2_signal_data = CO2SignalData( + datetime=datetime.datetime.fromisoformat( + _data["datetime"].replace("Z", "+00:00") + ), + carbon_intensity=_data["carbonIntensity"], + fossil_fuel_percentage=_data["fossilFuelPercentage"], + ) + return cls( + disclaimer=json_response["_disclaimer"], + country_code=json_response["countryCode"], + status=json_response["status"], + data=_co2_signal_data, + carbon_intensity_units=json_response["units"]["carbonIntensity"], + ) + + +@functools.lru_cache() +def _call_geocoder_query() -> typing.Any: + """Call GeoCoder API for IP location + + Cached so this API is only called once per session as required. + """ + return geocoder.ip("me") + + +class APIClient(pydantic.BaseModel): + """ + CO2 Signal API Client + + Provides an interface to the Electricity Maps API. 
+ """ + + co2_api_endpoint: pydantic.HttpUrl = pydantic.HttpUrl(CO2_SIGNAL_API_ENDPOINT) + co2_api_token: pydantic.SecretStr | None = None + timeout: pydantic.PositiveInt = 10 + + def __init__(self, *args, **kwargs) -> None: + """Initialise the CO2 Signal API client. + + Parameters + ---------- + co2_api_endpoint : str + endpoint for CO2 signal API + co2_api_token: str + RECOMMENDED. The API token for the CO2 Signal API, default is None. + timeout : int + timeout for API + """ + super().__init__(*args, **kwargs) + self._logger = logging.getLogger(self.__class__.__name__) + + if not self.co2_api_token: + self._logger.warning( + "⚠️ No API token provided for CO2 Signal, " + "use of a token is strongly recommended." + ) + + self._get_user_location_info() + + def _get_user_location_info(self) -> None: + """Retrieve location information for the current user.""" + self._logger.info("📍 Determining current user location.") + _current_user_loc_data: geocoder.location.BBox = _call_geocoder_query() + self._latitude: float + self._longitude: float + self._latitude, self._longitude = _current_user_loc_data.latlng + self._two_letter_country_code: str = _current_user_loc_data.country # type: ignore + + def get(self) -> CO2SignalResponse: + """Get the current data""" + _params: dict[str, float | str] = { + "lat": self._latitude, + "lon": self._longitude, + "countryCode": self._two_letter_country_code, + } + + if self.co2_api_token: + _params["auth-token"] = self.co2_api_token.get_secret_value() + + self._logger.debug(f"🍃 Retrieving carbon intensity data for: {_params}") + _response = requests.get(f"{self.co2_api_endpoint}", params=_params) + + if _response.status_code != http.HTTPStatus.OK: + try: + _error = _response.json()["error"] + except (AttributeError, KeyError): + _error = _response.text + raise RuntimeError( + f"[{_response.status_code}] Failed to retrieve current CO2 signal data for" + f" country '{self._two_letter_country_code}': {_error}" + ) + + return 
CO2SignalResponse.from_json_response(_response.json()) + + @property + def country_code(self) -> str: + """Returns the country code""" + return self._two_letter_country_code + + @property + def latitude(self) -> float: + """Returns current latitude""" + return self._latitude + + @property + def longitude(self) -> float: + """Returns current longitude""" + return self._longitude diff --git a/simvue/eco/config.py b/simvue/eco/config.py new file mode 100644 index 00000000..06e57761 --- /dev/null +++ b/simvue/eco/config.py @@ -0,0 +1,54 @@ +""" +Eco Config +========== + +Configuration file extension for configuring the Simvue Eco sub-module. +""" + +__date__ = "2025-03-06" + +import pydantic +import pathlib +import os + +from simvue.config.files import DEFAULT_OFFLINE_DIRECTORY + + +class EcoConfig(pydantic.BaseModel): + """Configurations for CO2 emission metrics gathering. + + Parameters + ---------- + co2_signal_api_token: str | None, optional + the CO2 signal API token (Recommended), default is None + cpu_thermal_design_power: int | None, optional + the TDP for the CPU + gpu_thermal_design_power: int | None, optional + the TDP for each GPU + local_data_directory: str, optional + the directory to store local data, default is Simvue offline directory + """ + + co2_signal_api_token: pydantic.SecretStr | None = None + cpu_thermal_design_power: pydantic.PositiveInt | None = None + cpu_n_cores: pydantic.PositiveInt | None = None + gpu_thermal_design_power: pydantic.PositiveInt | None = None + local_data_directory: pydantic.DirectoryPath | None = pydantic.Field( + None, validate_default=True + ) + intensity_refresh_interval: pydantic.PositiveInt | str | None = pydantic.Field( + default="1 day", gt=2 * 60 + ) + co2_intensity: float | None = None + + @pydantic.field_validator("local_data_directory", mode="before", check_fields=True) + @classmethod + def check_local_data_env( + cls, local_data_directory: pathlib.Path | None + ) -> pathlib.Path: + if _data_directory := 
os.environ.get("SIMVUE_ECO_DATA_DIRECTORY"): + return pathlib.Path(_data_directory) + if not local_data_directory: + local_data_directory = pathlib.Path(DEFAULT_OFFLINE_DIRECTORY) + local_data_directory.mkdir(exist_ok=True, parents=True) + return local_data_directory diff --git a/simvue/eco/emissions_monitor.py b/simvue/eco/emissions_monitor.py new file mode 100644 index 00000000..b1798e96 --- /dev/null +++ b/simvue/eco/emissions_monitor.py @@ -0,0 +1,326 @@ +""" +CO2 Monitor +=========== + +Provides an interface for estimating CO2 usage for processes on the CPU. +""" + +__author__ = "Kristian Zarebski" +__date__ = "2025-02-27" + +import datetime +import json +import pydantic +import dataclasses +import logging +import humanfriendly +import pathlib +import os.path + +from simvue.eco.api_client import APIClient, CO2SignalResponse + +TIME_FORMAT: str = "%Y_%m_%d_%H_%M_%S" +CO2_SIGNAL_API_INTERVAL_LIMIT: int = 2 * 60 + + +@dataclasses.dataclass +class ProcessData: + cpu_percentage: float = 0.0 + gpu_percentage: float | None = None + power_usage: float = 0.0 + total_energy: float = 0.0 + energy_delta: float = 0.0 + co2_emission: float = 0.0 + co2_delta: float = 0.0 + + +class CO2Monitor(pydantic.BaseModel): + """ + CO2 Monitor + + Provides an interface for estimating CO2 usage for processes on the CPU. 
+ """ + + thermal_design_power_per_cpu: pydantic.PositiveFloat | None + n_cores_per_cpu: pydantic.PositiveInt | None + thermal_design_power_per_gpu: pydantic.PositiveFloat | None + local_data_directory: pydantic.DirectoryPath + intensity_refresh_interval: int | None | str + co2_intensity: float | None + co2_signal_api_token: str | None + offline: bool = False + + def now(self) -> str: + """Return data file timestamp for the current time""" + _now: datetime.datetime = datetime.datetime.now(datetime.UTC) + return _now.strftime(TIME_FORMAT) + + @property + def outdated(self) -> bool: + """Checks if the current data is out of date.""" + if not self.intensity_refresh_interval: + return False + + _now: datetime.datetime = datetime.datetime.now() + _latest_time: datetime.datetime = datetime.datetime.strptime( + self._local_data["last_updated"], TIME_FORMAT + ) + return (_now - _latest_time).seconds > self.intensity_refresh_interval + + def _load_local_data(self) -> dict[str, str | dict[str, str | float]] | None: + """Loads locally stored CO2 intensity data""" + self._data_file_path = self.local_data_directory.joinpath( + "ecoclient_co2_intensity.json" + ) + + if not self._data_file_path.exists(): + return None + + with self._data_file_path.open() as in_f: + _data: dict[str, str | dict[str, str | float]] | None = json.load(in_f) + + return _data or None + + def __init__(self, *args, **kwargs) -> None: + """Initialise a CO2 Monitor. + + Parameters + ---------- + thermal_design_power_per_cpu: float | None + the TDP value for each CPU, default is 80W. + n_cores_per_cpu: int | None + the number of cores in each CPU, default is 4. + thermal_design_power_per_gpu: float | None + the TDP value for each GPU, default is 130W. + local_data_directory: pydantic.DirectoryPath + the directory in which to store CO2 intensity data. + intensity_refresh_interval: int | str | None + the interval in seconds at which to call the CO2 signal API. 
The default is once per day, + note the API is restricted to 30 requests per hour for a given user. Also accepts a + time period as a string, e.g. '1 week' + co2_intensity: float | None + disable using RestAPIs to retrieve CO2 intensity and instead use this value. + Default is None, use remote data. Value is in kgCO2/kWh + co2_signal_api_token: str + The API token for CO2 signal, default is None. + offline: bool, optional + Run without any server interaction + """ + _logger = logging.getLogger(self.__class__.__name__) + + if not isinstance(kwargs.get("thermal_design_power_per_cpu"), float): + kwargs["thermal_design_power_per_cpu"] = 80.0 + _logger.warning( + "⚠️ No TDP value provided for current CPU, will use arbitrary value of 80W." + ) + + if not isinstance(kwargs.get("n_cores_per_cpu"), float): + kwargs["n_cores_per_cpu"] = 4 + _logger.warning( + "⚠️ No core count provided for current CPU, will use arbitrary value of 4." + ) + + if not isinstance(kwargs.get("thermal_design_power_per_gpu"), float): + kwargs["thermal_design_power_per_gpu"] = 130.0 + _logger.warning( + "⚠️ No TDP value provided for current GPUs, will use arbitrary value of 130W." + ) + super().__init__(*args, **kwargs) + self._last_local_write = datetime.datetime.now() + + if self.intensity_refresh_interval and isinstance( + self.intensity_refresh_interval, str + ): + self.intensity_refresh_interval = int( + humanfriendly.parse_timespan(self.intensity_refresh_interval) + ) + + if ( + self.intensity_refresh_interval + and self.intensity_refresh_interval <= CO2_SIGNAL_API_INTERVAL_LIMIT + ): + raise ValueError( + "Invalid intensity refresh rate, CO2 signal API restricted to 30 calls per hour." + ) + + if self.co2_intensity: + _logger.warning( + f"⚠️ Disabling online data retrieval, using {self.co2_intensity} eqCO2g/kwh for CO2 intensity." 
+ ) + + self._data_file_path: pathlib.Path | None = None + + # Load any local data first, if the data is missing or due a refresh this will be None + self._local_data: dict[str, str | dict[str, float | str]] | None = ( + self._load_local_data() or {} + ) + self._measure_time = datetime.datetime.now() + self._logger = _logger + self._client: APIClient | None = ( + None + if self.co2_intensity or self.offline + else APIClient(co2_api_token=self.co2_signal_api_token, timeout=10) + ) + self._processes: dict[str, ProcessData] = {} + + def check_refresh(self) -> bool: + """Check to see if an intensity value refresh is required. + + Returns + ------- + bool + whether a refresh of the CO2 intensity was requested + from the CO2 Signal API. + """ + # Need to check if the local cache has been modified + # even if running offline + if ( + self._data_file_path.exists() + and ( + _check_write := datetime.datetime.fromtimestamp( + os.path.getmtime(f"{self._data_file_path}") + ) + ) + > self._last_local_write + ): + self._last_local_write = _check_write + with self._data_file_path.open("r") as in_f: + self._local_data = json.load(in_f) + + if not self._client or not self._local_data: + return False + + if ( + not self._local_data.setdefault(self._client.country_code, {}) + or self.outdated + ): + self._logger.info("🌍 CO2 emission outdated, calling API.") + _data: CO2SignalResponse = self._client.get() + self._local_data[self._client.country_code] = _data.model_dump(mode="json") + self._local_data["last_updated"] = self.now() + with self._data_file_path.open("w") as out_f: + json.dump(self._local_data, out_f, indent=2) + return True + return False + + def estimate_co2_emissions( + self, + process_id: str, + cpu_percent: float, + gpu_percent: float | None, + measure_interval: float, + ) -> None: + """Estimate the CO2 emissions""" + self._logger.debug( + f"📐 Estimating CO2 emissions from CPU usage of {cpu_percent}% " + f"and GPU usage of {gpu_percent}%" + if gpu_percent + else f"in 
interval {measure_interval}s." + ) + + if self._local_data is None: + raise RuntimeError("Expected local data to be initialised.") + + if not self._data_file_path: + raise RuntimeError("Expected local data file to be defined.") + + if not (_process := self._processes.get(process_id)): + self._processes[process_id] = (_process := ProcessData()) + + if self.co2_intensity: + _current_co2_intensity = self.co2_intensity + _co2_units = "kgCO2/kWh" + else: + self.check_refresh() + # If no local data yet then return + if not (_country_codes := list(self._local_data.keys())): + self._logger.warning( + "No CO2 emission data recorded as no CO2 intensity value " + "has been provided and there is no local intensity data available." + ) + return + + if self._client: + _country_code = self._client.country_code + else: + _country_code = _country_codes[0] + self._logger.debug( + f"🗂️ Using data for region '{_country_code}' from local cache for offline estimation." + ) + self._current_co2_data = CO2SignalResponse( + **self._local_data[_country_code] + ) + _current_co2_intensity = self._current_co2_data.data.carbon_intensity + _co2_units = self._current_co2_data.carbon_intensity_units + + _process.gpu_percentage = gpu_percent + _process.cpu_percentage = cpu_percent + _previous_energy: float = _process.total_energy + _process.power_usage = (_process.cpu_percentage / 100.0) * ( + self.thermal_design_power_per_cpu / self.n_cores_per_cpu + ) + + if _process.gpu_percentage and self.thermal_design_power_per_gpu: + _process.power_usage += ( + _process.gpu_percentage / 100.0 + ) * self.thermal_design_power_per_gpu + + _process.total_energy += _process.power_usage * measure_interval + _process.energy_delta = _process.total_energy - _previous_energy + + # Measured value is in g/kWh, convert to kg/kWs + _carbon_intensity_kgpws: float = _current_co2_intensity / (60 * 60 * 1e3) + + _previous_emission: float = _process.co2_emission + + _process.co2_delta = ( + _process.power_usage * 
_carbon_intensity_kgpws * measure_interval + ) + + _process.co2_emission += _process.co2_delta + + self._logger.debug( + f"📝 For process '{process_id}', recorded: CPU={_process.cpu_percentage:.2f}%, " + f"Power={_process.power_usage:.2f}W, CO2={_process.co2_emission:.2e}{_co2_units}" + ) + + def simvue_metrics(self) -> dict[str, float]: + """Retrieve metrics to send to Simvue server.""" + return { + "sustainability.emissions.total": self.total_co2_emission, + "sustainability.emissions.delta": self.total_co2_delta, + "sustainability.energy_consumed.total": self.total_energy, + "sustainability.energy_consumed.delta": self.total_energy_delta, + } + + @property + def last_process(self) -> str | None: + return list(self._processes.keys())[-1] if self._processes else None + + @property + def process_data(self) -> dict[str, ProcessData]: + return self._processes + + @property + def current_carbon_intensity(self) -> float: + return self.co2_intensity or self._client.get().data.carbon_intensity + + @property + def total_power_usage(self) -> float: + return sum(process.power_usage for process in self._processes.values()) + + @property + def total_co2_emission(self) -> float: + return sum(process.co2_emission for process in self._processes.values()) + + @property + def total_co2_delta(self) -> float: + return sum(process.co2_delta for process in self._processes.values()) + + @property + def total_energy_delta(self) -> float: + return sum(process.energy_delta for process in self._processes.values()) + + @property + def total_energy(self) -> float: + return sum(process.total_energy for process in self._processes.values()) diff --git a/simvue/metrics.py b/simvue/metrics.py index 6b224106..bf5b209d 100644 --- a/simvue/metrics.py +++ b/simvue/metrics.py @@ -10,6 +10,7 @@ import logging import psutil + from .pynvml import ( nvmlDeviceGetComputeRunningProcesses, nvmlDeviceGetCount, @@ -21,12 +22,23 @@ nvmlShutdown, ) +RESOURCES_METRIC_PREFIX: str = "resources" + logger = 
logging.getLogger(__name__) def get_process_memory(processes: list[psutil.Process]) -> int: - """ - Get the resident set size + """Get the resident set size. + + Parameters + ---------- + processes: list[psutil.Process] + processes to monitor + + Returns + ------- + int + total process memory """ rss: int = 0 for process in processes: @@ -38,11 +50,22 @@ def get_process_memory(processes: list[psutil.Process]) -> int: def get_process_cpu( processes: list[psutil.Process], interval: float | None = None -) -> int: - """ - Get the CPU usage +) -> float: + """Get the CPU usage If first time being called, use a small interval to collect initial CPU metrics. + + Parameters + ---------- + processes: list[psutil.Process] + list of processes to track for CPU usage. + interval: float, optional + interval to measure across, default is None, use previous measure time difference. + + Returns + ------- + float + CPU percentage usage """ cpu_percent: int = 0 for process in processes: @@ -53,8 +76,19 @@ def get_process_cpu( def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: - """ - Check if the GPU is being used by the list of processes + """Check if the GPU is being used by the list of processes. + + Parameters + ---------- + handle: Unknown + connector to GPU API + processes: list[psutil.Process] + list of processes to monitor + + Returns + ------- + bool + if GPU is being used """ pids = [process.pid for process in processes] @@ -65,11 +99,22 @@ def is_gpu_used(handle, processes: list[psutil.Process]) -> bool: return len(list(set(gpu_pids) & set(pids))) > 0 -def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: - """ - Get GPU metrics +def get_gpu_metrics(processes: list[psutil.Process]) -> list[tuple[float, float]]: + """Get GPU metrics. 
+ + Parameters + ---------- + processes: list[psutil.Process] + list of processes to monitor + + Returns + ------- + list[tuple[float, float]] + For each GPU identified: + - gpu_percent + - gpu_memory """ - gpu_metrics: dict[str, float] = {} + gpu_metrics: list[tuple[float, float]] = [] with contextlib.suppress(Exception): nvmlInit() @@ -80,11 +125,60 @@ def get_gpu_metrics(processes: list[psutil.Process]) -> dict[str, float]: utilisation_percent = nvmlDeviceGetUtilizationRates(handle).gpu memory = nvmlDeviceGetMemoryInfo(handle) memory_percent = 100 * memory.free / memory.total - gpu_metrics[f"resources/gpu.utilisation.percent.{i}"] = ( - utilisation_percent - ) - gpu_metrics[f"resources/gpu.memory.percent.{i}"] = memory_percent + gpu_metrics.append((utilisation_percent, memory_percent)) nvmlShutdown() return gpu_metrics + + +class SystemResourceMeasurement: + """Class for taking and storing a system resources measurement.""" + + def __init__( + self, + processes: list[psutil.Process], + interval: float | None, + cpu_only: bool = False, + ) -> None: + """Perform a measurement of system resource consumption. + + Parameters + ---------- + processes: list[psutil.Process] + processes to measure across. + interval: float | None + interval to measure, if None previous measure time used for interval. 
+ cpu_only: bool, optional + only record CPU information, default False + """ + self.cpu_percent: float | None = get_process_cpu(processes, interval=interval) + self.cpu_memory: float | None = get_process_memory(processes) + self.gpus: list[dict[str, float]] = ( + None if cpu_only else get_gpu_metrics(processes) + ) + + def to_dict(self) -> dict[str, float]: + """Create metrics dictionary for sending to a Simvue server.""" + _metrics: dict[str, float] = { + f"{RESOURCES_METRIC_PREFIX}/cpu.usage.percentage": self.cpu_percent, + f"{RESOURCES_METRIC_PREFIX}/cpu.usage.memory": self.cpu_memory, + } + + for i, gpu in enumerate(self.gpus or []): + _metrics[f"{RESOURCES_METRIC_PREFIX}/gpu.utilisation.percent.{i}"] = gpu[ + "utilisation" + ] + _metrics[f"{RESOURCES_METRIC_PREFIX}/gpu.utilisation.memory.{i}"] = gpu[ + "memory" + ] + + return _metrics + + @property + def gpu_percent(self) -> float: + return sum(m[0] for m in self.gpus or []) / (len(self.gpus or []) or 1) + + @property + def gpu_memory(self) -> float: + return sum(m[1] for m in self.gpus or []) / (len(self.gpus or []) or 1) diff --git a/simvue/run.py b/simvue/run.py index 355cd693..dc5ef56d 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -39,11 +39,11 @@ from .factory.dispatch import Dispatcher from .executor import Executor -from .metrics import get_gpu_metrics, get_process_cpu, get_process_memory +from .metrics import SystemResourceMeasurement from .models import FOLDER_REGEX, NAME_REGEX, MetricKeyString from .system import get_system from .metadata import git_info, environment -from .eco import SimvueEmissionsTracker, OfflineSimvueEmissionsTracker +from .eco import CO2Monitor from .utilities import ( skip_if_failed, validate_timestamp, @@ -175,7 +175,7 @@ def __init__( self._data: dict[str, typing.Any] = {} self._step: int = 0 self._active: bool = False - self._user_config = SimvueConfiguration.fetch( + self._user_config: SimvueConfiguration = SimvueConfiguration.fetch( server_url=server_url, 
server_token=server_token, mode=mode ) @@ -187,10 +187,10 @@ def __init__( ) self._aborted: bool = False - self._resources_metrics_interval: int | None = ( + self._system_metrics_interval: int | None = ( HEARTBEAT_INTERVAL - if self._user_config.metrics.resources_metrics_interval < 1 - else self._user_config.metrics.resources_metrics_interval + if self._user_config.metrics.system_metrics_interval < 1 + else self._user_config.metrics.system_metrics_interval ) self._headers: dict[str, str] = ( { @@ -209,36 +209,7 @@ def __init__( self._heartbeat_thread: threading.Thread | None = None self._heartbeat_interval: int = HEARTBEAT_INTERVAL - self._emission_metrics_interval: int | None = ( - HEARTBEAT_INTERVAL - if ( - (_interval := self._user_config.metrics.emission_metrics_interval) - and _interval < 1 - ) - else self._user_config.metrics.emission_metrics_interval - ) - if mode == "offline": - if ( - self._user_config.metrics.enable_emission_metrics - and not self._user_config.offline.country_iso_code - ): - raise ValueError( - "Country ISO code must be provided if tracking emissions metrics in offline mode." 
- ) - - self._emissions_tracker: OfflineSimvueEmissionsTracker | None = ( - OfflineSimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval - ) - if self._user_config.metrics.enable_emission_metrics - else None - ) - else: - self._emissions_tracker: SimvueEmissionsTracker | None = ( - SimvueEmissionsTracker("simvue", self, self._emission_metrics_interval) - if self._user_config.metrics.enable_emission_metrics - else None - ) + self._emissions_monitor: CO2Monitor | None = None def __enter__(self) -> Self: return self @@ -251,10 +222,6 @@ def _handle_exception_throw( ) -> None: _exception_thrown: str | None = exc_type.__name__ if exc_type else None _is_running: bool = self._status == "running" - _is_running_online: bool = self._id is not None and _is_running - _is_running_offline: bool = ( - self._user_config.run.mode == "offline" and _is_running - ) _is_terminated: bool = ( _exception_thrown is not None and _exception_thrown == "KeyboardInterrupt" ) @@ -326,38 +293,114 @@ def processes(self) -> list[psutil.Process]: return list(set(process_list)) - def _get_sysinfo(self, interval: float | None = None) -> dict[str, typing.Any]: - """Retrieve system administration + def _terminate_run( + self, + abort_callback: typing.Callable[[Self], None] | None, + force_exit: bool = True, + ) -> None: + """Close the current simvue Run and its subprocesses. + + Closes the run and all subprocesses with the default to being also. + To abort the actual Python execution as well. 
Parameters ---------- - interval : float | None - The interval to use for collection of CPU metrics, by default None (non blocking) + abort_callback: Callable, optional + the callback to execute on the termination else None + force_exit: bool, optional + whether to close Python itself, the default is True + """ + self._alert_raised_trigger.set() + logger.debug("Received abort request from server") - Returns - ------- - dict[str, typing.Any] - retrieved system specifications + if abort_callback is not None: + abort_callback(self) # type: ignore + + if self._abort_on_alert != "ignore": + self.kill_all_processes() + if self._dispatcher and self._shutdown_event: + self._shutdown_event.set() + self._dispatcher.purge() + self._dispatcher.join() + if self._active: + self.set_status("terminated") + click.secho( + "[simvue] Run was aborted.", + fg="red" if self._term_color else None, + bold=self._term_color, + ) + if self._abort_on_alert == "terminate": + os._exit(1) if force_exit else sys.exit(1) + + def _get_internal_metrics( + self, + system_metrics_step: int | None, + emission_metrics_step: int | None, + res_measure_interval: int | None = None, + ems_measure_interval: int | None = None, + ) -> None: + """Refresh resource and emissions metrics. + + Checks if the refresh interval has been satisfied for emissions + and resource metrics, if so adds latest values to dispatch. + + Parameters + ---------- + system_metrics_step: int | None + the current step for this resource metric record, + None if skipping resource metrics. + emission_metrics_step: int | None + the current step for this emission metrics record, + None if skipping emission metrics. 
+    res_measure_interval: int | None, optional +        the interval for resource metric gathering, default is None +    ems_measure_interval: int | None, optional +        the interval for emission metric gathering, default is None + +    Returns +    ------- +    None +        metrics are added to the dispatcher directly within this method; +        the new measure times are not returned     """ -        processes = self.processes -        cpu = get_process_cpu(processes, interval=interval) -        memory = get_process_memory(processes) -        gpu = get_gpu_metrics(processes) -        data: dict[str, typing.Any] = {} - -        if memory is not None and cpu is not None: -            data = { -                f"{RESOURCES_METRIC_PREFIX}/cpu.usage.percent": cpu, -                f"{RESOURCES_METRIC_PREFIX}/memory.usage": memory, -            } -        if gpu: -            for item in gpu: -                data[item] = gpu[item] -        return data +        _current_system_measure = SystemResourceMeasurement( +            self.processes, +            interval=res_measure_interval, +            cpu_only=not system_metrics_step, +        ) + +        if system_metrics_step is not None: +            # Set join on fail to false as if an error is thrown +            # join would be called on this thread and a thread cannot +            # join itself!
+ self._add_metrics_to_dispatch( + _current_system_measure.to_dict(), + join_on_fail=False, + step=system_metrics_step, + ) + + if ( + self._emissions_monitor + and emission_metrics_step is not None + and ems_measure_interval is not None + and _current_system_measure.cpu_percent is not None + ): + self._emissions_monitor.estimate_co2_emissions( + process_id=f"{self._name}", + cpu_percent=_current_system_measure.cpu_percent, + measure_interval=ems_measure_interval, + gpu_percent=_current_system_measure.gpu_percent, + ) + self._add_metrics_to_dispatch( + self._emissions_monitor.simvue_metrics(), + join_on_fail=False, + step=emission_metrics_step, + ) def _create_heartbeat_callback( self, ) -> typing.Callable[[threading.Event], None]: + """Defines the callback executed at the heartbeat interval for the Run.""" if ( self._user_config.run.mode == "online" and (not self._user_config.server.url or not self._id) @@ -365,44 +408,73 @@ def _create_heartbeat_callback( raise RuntimeError("Could not commence heartbeat, run not initialised") def _heartbeat( - heartbeat_trigger: typing.Optional[ - threading.Event - ] = self._heartbeat_termination_trigger, - abort_callback: typing.Optional[ - typing.Callable[[Self], None] - ] = self._abort_callback, + heartbeat_trigger: threading.Event + | None = self._heartbeat_termination_trigger, + abort_callback: typing.Callable[[Self], None] | None = self._abort_callback, ) -> None: if not heartbeat_trigger: raise RuntimeError("Expected initialisation of heartbeat") - last_heartbeat = time.time() - last_res_metric_call = time.time() + last_heartbeat: float = 0 + last_res_metric_call: float = 0 + last_co2_metric_call: float = 0 - if self._resources_metrics_interval: - self._add_metrics_to_dispatch( - self._get_sysinfo(interval=1), join_on_fail=False, step=0 - ) - res_step = 1 + co2_step: int = 0 + res_step: int = 0 - while not heartbeat_trigger.is_set(): - time.sleep(0.1) + initial_ems_metrics_interval: float = time.time() - self._start_time + 
while not heartbeat_trigger.is_set(): with self._configuration_lock: - if ( - self._resources_metrics_interval - and (res_time := time.time()) - last_res_metric_call - > self._resources_metrics_interval - ): - # Set join on fail to false as if an error is thrown - # join would be called on this thread and a thread cannot - # join itself! - self._add_metrics_to_dispatch( - self._get_sysinfo(), join_on_fail=False, step=res_step - ) - last_res_metric_call = res_time - res_step += 1 + _current_time: float = time.time() + _update_system_metrics: bool = ( + self._system_metrics_interval is not None + and _current_time - last_res_metric_call + > self._system_metrics_interval + and self._status == "running" + ) + _update_emissions_metrics: bool = ( + self._system_metrics_interval is not None + and self._emissions_monitor + and _current_time - last_co2_metric_call + > self._system_metrics_interval + and self._status == "running" + ) + + # In order to get a resource metric reading at t=0 + # because there is no previous CPU reading yet we cannot + # use the default of None for the interval here, so we measure + # at an interval of 1s. 
For emissions metrics the first step + # is time since run start + self._get_internal_metrics( + emission_metrics_step=co2_step + if _update_emissions_metrics + else None, + system_metrics_step=res_step + if _update_system_metrics + else None, + res_measure_interval=1 if res_step == 0 else None, + ems_measure_interval=initial_ems_metrics_interval + if co2_step == 0 + else self._system_metrics_interval, + ) + + res_step += 1 + co2_step += 1 + + last_res_metric_call = ( + _current_time + if _update_system_metrics + else last_res_metric_call + ) + last_co2_metric_call = ( + _current_time + if _update_emissions_metrics + else last_co2_metric_call + ) if time.time() - last_heartbeat < self._heartbeat_interval: + time.sleep(1) continue last_heartbeat = time.time() @@ -410,31 +482,13 @@ def _heartbeat( # Check if the user has aborted the run with self._configuration_lock: if self._sv_obj and self._sv_obj.abort_trigger: - self._alert_raised_trigger.set() - logger.debug("Received abort request from server") - - if abort_callback is not None: - abort_callback(self) # type: ignore - - if self._abort_on_alert != "ignore": - self.kill_all_processes() - if self._dispatcher and self._shutdown_event: - self._shutdown_event.set() - self._dispatcher.purge() - self._dispatcher.join() - if self._active: - self.set_status("terminated") - click.secho( - "[simvue] Run was aborted.", - fg="red" if self._term_color else None, - bold=self._term_color, - ) - if self._abort_on_alert == "terminate": - os._exit(1) + self._terminate_run(abort_callback=abort_callback) if self._sv_obj: self._sv_obj.send_heartbeat() + time.sleep(1) + return _heartbeat def _create_dispatch_callback( @@ -457,7 +511,6 @@ def _create_dispatch_callback( def _dispatch_callback( buffer: list[typing.Any], category: typing.Literal["events", "metrics"], - run_obj: RunObject = self._sv_obj, ) -> None: if category == "events": _events = Events.new( @@ -476,14 +529,9 @@ def _dispatch_callback( return _dispatch_callback - def 
_start(self, reconnect: bool = False) -> bool: + def _start(self) -> bool: """Start a run - Parameters - ---------- - reconnect : bool, optional - whether this is a reconnect to an existing run, by default False - Returns ------- bool @@ -561,10 +609,6 @@ def _error(self, message: str, join_threads: bool = True) -> None: RuntimeError exception throw """ - if self._emissions_tracker: - with contextlib.suppress(Exception): - self._emissions_tracker.stop() - # Stop heartbeat if self._heartbeat_termination_trigger and self._heartbeat_thread: self._heartbeat_termination_trigger.set() @@ -773,10 +817,6 @@ def init( fg="green" if self._term_color else None, ) - if self._emissions_tracker and self._status == "running": - self._emissions_tracker.post_init() - self._emissions_tracker.start() - return True @skip_if_failed("_aborted", "_suppress_errors", None) @@ -983,7 +1023,7 @@ def reconnect(self, run_id: str) -> bool: self._id = run_id self._sv_obj = RunObject(identifier=self._id, _read_only=False) - self._start(reconnect=True) + self._start() return True @@ -1005,7 +1045,6 @@ def set_pid(self, pid: int) -> None: _process.cpu_percent() for _process in self._child_processes + [self._parent_process] ] - time.sleep(0.1) @skip_if_failed("_aborted", "_suppress_errors", False) @pydantic.validate_call @@ -1014,10 +1053,9 @@ def config( *, suppress_errors: bool | None = None, queue_blocking: bool | None = None, - resources_metrics_interval: pydantic.PositiveInt | None = None, - emission_metrics_interval: pydantic.PositiveInt | None = None, + system_metrics_interval: pydantic.PositiveInt | None = None, enable_emission_metrics: bool | None = None, - disable_resources_metrics: bool | None = None, + disable_system_metrics: bool | None = None, storage_id: str | None = None, abort_on_alert: typing.Literal["run", "all", "ignore"] | bool | None = None, ) -> bool: @@ -1030,11 +1068,11 @@ def config( dormant state if an error occurs queue_blocking : bool, optional block thread queues during 
metric/event recording - resources_metrics_interval : int, optional + system_metrics_interval : int, optional frequency at which to collect resource metrics enable_emission_metrics : bool, optional enable monitoring of emission metrics - disable_resources_metrics : bool, optional + disable_system_metrics : bool, optional disable monitoring of resource metrics storage_id : str, optional identifier of storage to use, by default None @@ -1057,51 +1095,43 @@ def config( if queue_blocking is not None: self._queue_blocking = queue_blocking - if resources_metrics_interval and disable_resources_metrics: + if system_metrics_interval and disable_system_metrics: self._error( "Setting of resource metric interval and disabling resource metrics is ambiguous" ) return False - if disable_resources_metrics: + if disable_system_metrics: self._pid = None - self._resources_metrics_interval = None - - if emission_metrics_interval: - if not enable_emission_metrics: - self._error( - "Cannot set rate of emission metrics, these metrics have been disabled" - ) - return False - self._emission_metrics_interval = emission_metrics_interval + self._system_metrics_interval = None if enable_emission_metrics: if self._user_config.run.mode == "offline": - if not self._user_config.offline.country_iso_code: - self._error( - "Country ISO code must be provided if tracking emissions metrics in offline mode." 
- ) - self._emissions_tracker: OfflineSimvueEmissionsTracker = ( - OfflineSimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval - ) + # Create an emissions monitor with no API calls + self._emissions_monitor = CO2Monitor( + intensity_refresh_interval=None, + co2_intensity=self._user_config.eco.co2_intensity, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=None, + thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, + thermal_design_power_per_gpu=self._user_config.eco.gpu_thermal_design_power, + offline=True, ) else: - self._emissions_tracker: SimvueEmissionsTracker = ( - SimvueEmissionsTracker( - "simvue", self, self._emission_metrics_interval - ) + self._emissions_monitor = CO2Monitor( + intensity_refresh_interval=self._user_config.eco.intensity_refresh_interval, + local_data_directory=self._user_config.eco.local_data_directory, + co2_signal_api_token=self._user_config.eco.co2_signal_api_token, + co2_intensity=self._user_config.eco.co2_intensity, + thermal_design_power_per_cpu=self._user_config.eco.cpu_thermal_design_power, + thermal_design_power_per_gpu=self._user_config.eco.gpu_thermal_design_power, ) - # If the main Run API object is initialised the run is active - # hence the tracker should start too - if self._sv_obj: - self._emissions_tracker.start() - elif enable_emission_metrics is False and self._emissions_tracker: - self._error("Cannot disable emissions tracker once it has been started") + elif enable_emission_metrics is False and self._emissions_monitor: + self._error("Cannot disable emissions monitor once it has been started") - if resources_metrics_interval: - self._resources_metrics_interval = resources_metrics_interval + if system_metrics_interval: + self._system_metrics_interval = system_metrics_interval if abort_on_alert is not None: if isinstance(abort_on_alert, bool): @@ -1589,10 +1619,6 @@ def set_status( def _tidy_run(self) -> None: 
self._executor.wait_for_completion() - if self._emissions_tracker: - with contextlib.suppress(Exception): - self._emissions_tracker.stop() - if self._heartbeat_thread and self._heartbeat_termination_trigger: self._heartbeat_termination_trigger.set() self._heartbeat_thread.join() diff --git a/simvue/sender.py b/simvue/sender.py index d747dc9b..47e8f7fc 100644 --- a/simvue/sender.py +++ b/simvue/sender.py @@ -15,6 +15,7 @@ from simvue.config.user import SimvueConfiguration import simvue.api.objects +from simvue.eco.emissions_monitor import CO2Monitor from simvue.version import __version__ UPLOAD_ORDER: list[str] = [ @@ -150,7 +151,8 @@ def sender( max_workers: int = 5, threading_threshold: int = 10, objects_to_upload: list[str] = UPLOAD_ORDER, -): + co2_intensity_refresh: int | None | str = None, +) -> dict[str, str]: """Send data from a local cache directory to the Simvue server. Parameters @@ -163,8 +165,16 @@ def sender( The number of cached files above which threading will be used objects_to_upload : list[str] Types of objects to upload, by default uploads all types of objects present in cache + co2_intensity_refresh: int | None | str + the refresh interval for the CO2 intensity value, if None use config value if available, + else do not refresh. + + Returns + ------- + id_mapping + mapping of local ID to server ID """ - _user_config = SimvueConfiguration.fetch() + _user_config: SimvueConfiguration = SimvueConfiguration.fetch() cache_dir = cache_dir or _user_config.offline.cache cache_dir.joinpath("server_ids").mkdir(parents=True, exist_ok=True) @@ -234,6 +244,25 @@ def sender( ), _heartbeat_files, ) + + # If CO2 emissions are requested create a dummy monitor which just + # refreshes the CO2 intensity value if required. No emission metrics + # will be taken by the sender itself, values are assumed to be recorded + # by any offline runs being sent. 
+ +    if ( +        _refresh_interval := co2_intensity_refresh +        or _user_config.eco.intensity_refresh_interval +    ): +        CO2Monitor( +            thermal_design_power_per_gpu=None, +            thermal_design_power_per_cpu=None, +            local_data_directory=cache_dir, +            intensity_refresh_interval=_refresh_interval, +            co2_intensity=co2_intensity_refresh or _user_config.eco.co2_intensity, +            co2_signal_api_token=_user_config.eco.co2_signal_api_token, +        ).check_refresh() + # Remove lock file to allow another sender to start in the future _lock_path.unlink() return _id_mapping diff --git a/simvue/utilities.py b/simvue/utilities.py index 25524af9..681eeaf8 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -144,6 +144,11 @@ def wrapper(self, *args, **kwargs) -> typing.Any: raise RuntimeError( f"Plotting features require the '{extra_name}' extension to Simvue" ) +        elif extra_name == "eco": +            if not importlib.util.find_spec("geocoder"): +                raise RuntimeError( +                    f"Eco features require the '{extra_name}' extension to Simvue" +                ) elif extra_name == "torch": if not importlib.util.find_spec("torch"): raise RuntimeError( diff --git a/tests/conftest.py b/tests/conftest.py index 01c5cf4e..0ac0d563 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import contextlib +from _pytest import monkeypatch from numpy import fix import pytest +import datetime import pytest_mock import typing import uuid @@ -10,13 +12,17 @@ import json import pathlib import logging -from simvue.api.objects.artifact import Artifact -from simvue.exception import ObjectNotFoundError +import requests + +import simvue.eco.api_client as sv_eco + import simvue.run as sv_run import simvue.api.objects as sv_api_obj import simvue.config.user as sv_cfg import simvue.utilities +from simvue.api.objects.artifact import Artifact +from simvue.exception import ObjectNotFoundError + MAX_BUFFER_SIZE: int = 10 class CountingLogHandler(logging.Handler): @@ -31,6 +37,8 @@ def emit(self, record): for i, capture in enumerate(self.captures):
if capture in record.msg: + if "resource" in record.msg: + print(f"[{i}={self.counts[i]}]: {record.msg}") self.counts[i] += 1 @@ -42,6 +50,52 @@ def clear_out_files() -> None: file_obj.unlink() +@pytest.fixture +def mock_co2_signal(monkeypatch: monkeypatch.MonkeyPatch) -> dict[str, dict | str]: + _mock_data = { + "data": { + "datetime": datetime.datetime.now().isoformat(), + "carbonIntensity": 40, + "fossilFuelPercentage": 39, + }, + "_disclaimer": "test disclaimer", + "countryCode": "GB", + "status": "unknown", + "units": {"carbonIntensity": "eqCO2kg/kwh"} + } + class MockCo2SignalAPIResponse: + def json(*_, **__) -> dict: + return _mock_data + + @property + def status_code(self) -> int: + return 200 + + _req_get = requests.get + + def _mock_get(*args, **kwargs) -> requests.Response: + if sv_eco.CO2_SIGNAL_API_ENDPOINT in args or kwargs.get("url") == sv_eco.CO2_SIGNAL_API_ENDPOINT: + return MockCo2SignalAPIResponse() + else: + return _req_get(*args, **kwargs) + def _mock_location_info(self) -> None: + self._logger.info("📍 Determining current user location.") + self._latitude: float + self._longitude: float + self._latitude, self._longitude = (-1, -1) + self._two_letter_country_code: str = "GB" + + monkeypatch.setattr(requests, "get", _mock_get) + monkeypatch.setattr(sv_eco.APIClient, "_get_user_location_info", _mock_location_info) + + return _mock_data + + +@pytest.fixture +def speedy_heartbeat(monkeypatch: monkeypatch.MonkeyPatch) -> None: + monkeypatch.setattr(sv_run, "HEARTBEAT_INTERVAL", 0.1) + + @pytest.fixture(autouse=True) def setup_logging() -> CountingLogHandler: logging.basicConfig(level=logging.DEBUG) @@ -57,7 +111,13 @@ def log_messages(caplog): @pytest.fixture -def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def prevent_script_exit(monkeypatch: monkeypatch.MonkeyPatch) -> None: + _orig_func = sv_run.Run._terminate_run + monkeypatch.setattr(sv_run.Run, "_terminate_run", lambda *args, **kwargs: 
_orig_func(*args, force_exit=False, **kwargs)) + + +@pytest.fixture +def create_test_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: _test_run_data = setup_test_run(run, True, request) yield run, _test_run_data @@ -66,14 +126,13 @@ def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], for alert_id in _test_run_data.get("alert_ids", []): with contextlib.suppress(ObjectNotFoundError): sv_api_obj.Alert(identifier=alert_id).delete() -        clear_out_files() +        clear_out_files() @pytest.fixture -def create_test_run_offline(mocker: pytest_mock.MockerFixture, request, monkeypatch: pytest.MonkeyPatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_test_run_offline(request, monkeypatch: pytest.MonkeyPatch, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: @@ -82,24 +141,23 @@ def testing_exit(status: int) -> None: @pytest.fixture -def create_plain_run(request, mocker: pytest_mock.MockFixture) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_plain_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) with sv_run.Run() as run: yield run, setup_test_run(run, False, request) clear_out_files() @pytest.fixture -def create_pending_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_pending_run(request, prevent_script_exit) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with sv_run.Run() as run: yield run, setup_test_run(run, False, request, True)
clear_out_files() @pytest.fixture -def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeypatch: pytest.MonkeyPatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: +def create_plain_run_offline(request,prevent_script_exit,monkeypatch) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]: with tempfile.TemporaryDirectory() as temp_d: monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) with sv_run.Run("offline") as run: @@ -111,7 +169,6 @@ def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request, monkeyp def create_run_object(mocker: pytest_mock.MockFixture) -> sv_api_obj.Run: def testing_exit(status: int) -> None: raise SystemExit(status) - mocker.patch("os._exit", testing_exit) _fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0] _folder = sv_api_obj.Folder.new(path=f"/simvue_unit_testing/{_fix_use_id}") _folder.commit() @@ -138,7 +195,6 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur TEST_DATA["tags"].append("ci") run.config(suppress_errors=False) - run._heartbeat_interval = 1 run.init( name=TEST_DATA['metadata']['test_identifier'], tags=TEST_DATA["tags"], @@ -207,7 +263,7 @@ def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.Fixtur TEST_DATA["url"] = run._user_config.server.url TEST_DATA["headers"] = run._headers TEST_DATA["pid"] = run._pid - TEST_DATA["resources_metrics_interval"] = run._resources_metrics_interval + TEST_DATA["system_metrics_interval"] = run._system_metrics_interval if create_objects: with tempfile.TemporaryDirectory() as tempd: diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 390d90d6..5ba17620 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -161,9 +161,6 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None: ids=("sorted-metadata", "sorted-name-created", None) ) def test_get_artifacts_entries(create_test_run: 
tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None: - # TODO: Reinstate this test once server bug fixed - if any("metadata" in a[0] for a in sorting or []): - pytest.skip(reason="Server bug fix required for metadata sorting.") client = svc.Client() assert dict(client.list_artifacts(create_test_run[1]["run_id"], sort_by_columns=sorting)) assert client.get_artifact(create_test_run[1]["run_id"], name="test_attributes") @@ -252,9 +249,6 @@ def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None: ids=("no-sort", "sort-path-metadata", "sort-modified") ) def test_get_folders(create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None: - #TODO: Once server is fixed reinstate this test - if "modified" in (a[0] for a in sorting or []): - pytest.skip(reason="Server bug when sorting by 'modified'") client = svc.Client() assert (folders := client.get_folders(sort_by_columns=sorting)) _id, _folder = next(folders) @@ -417,14 +411,20 @@ def test_alert_deletion() -> None: @pytest.mark.client -def test_abort_run(create_plain_run: tuple[sv_run.Run, dict]) -> None: +def test_abort_run(speedy_heartbeat, create_plain_run: tuple[sv_run.Run, dict]) -> None: run, run_data = create_plain_run _uuid = f"{uuid.uuid4()}".split("-")[0] run.update_tags([f"delete_me_{_uuid}"]) - time.sleep(1) _client = svc.Client() _client.abort_run(run.id, reason="Test abort") - time.sleep(1) - assert run._status == "terminated" + time.sleep(2) + + # On some machines it might take a little longer so + # try twice before accepting the abort failed + try: + assert run._status == "terminated" + except AssertionError: + time.sleep(2) + assert run._status == "terminated" diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index e493c5db..527d63da 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -1,6 +1,5 @@ +import json import os -from os.path import basename -from numpy import 
identity import pytest import pytest_mock import time @@ -16,8 +15,10 @@ import random import datetime import simvue -from simvue.api.objects.alert.fetch import Alert +from simvue.api.objects import Alert, Metrics +from simvue.eco.api_client import CO2SignalData, CO2SignalResponse from simvue.exception import SimvueRunError +from simvue.eco.emissions_monitor import TIME_FORMAT, CO2Monitor import simvue.run as sv_run import simvue.client as sv_cl import simvue.sender as sv_send @@ -48,34 +49,52 @@ def test_check_run_initialised_decorator() -> None: @pytest.mark.run -@pytest.mark.codecarbon -def test_run_with_emissions() -> None: - with sv_run.Run() as run_created: - run_created.init(retention_period="1 min") - run_created.config(enable_emission_metrics=True, emission_metrics_interval=1) - time.sleep(5) - _run = RunObject(identifier=run_created.id) - _metric_names = [item[0] for item in _run.metrics] - client = sv_cl.Client() - for _metric in ["emissions", "energy_consumed"]: - _total_metric_name = f"sustainability.{_metric}.total" - _delta_metric_name = f"sustainability.{_metric}.delta" - assert _total_metric_name in _metric_names - assert _delta_metric_name in _metric_names - _metric_values = client.get_metric_values( - metric_names=[_total_metric_name, _delta_metric_name], - xaxis="time", - output_format="dataframe", - run_ids=[run_created.id], - ) +@pytest.mark.eco +@pytest.mark.online +def test_run_with_emissions_online(speedy_heartbeat, mock_co2_signal, create_plain_run) -> None: + run_created, _ = create_plain_run + run_created.config(enable_emission_metrics=True) + time.sleep(3) + _run = RunObject(identifier=run_created.id) + _metric_names = [item[0] for item in _run.metrics] + client = sv_cl.Client() + for _metric in ["emissions", "energy_consumed"]: + _total_metric_name = f"sustainability.{_metric}.total" + _delta_metric_name = f"sustainability.{_metric}.delta" + assert _total_metric_name in _metric_names + assert _delta_metric_name in _metric_names + 
_metric_values = client.get_metric_values( + metric_names=[_total_metric_name, _delta_metric_name], + xaxis="time", + output_format="dataframe", + run_ids=[run_created.id], + ) + assert _total_metric_name in _metric_values - # Check that total = previous total + latest delta - _total_values = _metric_values[_total_metric_name].tolist() - _delta_values = _metric_values[_delta_metric_name].tolist() - assert len(_total_values) > 1 - for i in range(1, len(_total_values)): - assert _total_values[i] == _total_values[i - 1] + _delta_values[i] +@pytest.mark.run +@pytest.mark.eco +@pytest.mark.offline +def test_run_with_emissions_offline(speedy_heartbeat, mock_co2_signal, create_plain_run_offline) -> None: + run_created, _ = create_plain_run_offline + run_created.config(enable_emission_metrics=True) + time.sleep(2) + id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"]) + _run = RunObject(identifier=id_mapping[run_created.id]) + _metric_names = [item[0] for item in _run.metrics] + client = sv_cl.Client() + for _metric in ["emissions", "energy_consumed"]: + _total_metric_name = f"sustainability.{_metric}.total" + _delta_metric_name = f"sustainability.{_metric}.delta" + assert _total_metric_name in _metric_names + assert _delta_metric_name in _metric_names + _metric_values = client.get_metric_values( + metric_names=[_total_metric_name, _delta_metric_name], + xaxis="time", + output_format="dataframe", + run_ids=[id_mapping[run_created.id]], + ) + assert _total_metric_name in _metric_values @pytest.mark.run @pytest.mark.parametrize( @@ -90,20 +109,20 @@ def test_run_with_emissions() -> None: def test_log_metrics( overload_buffer: bool, timestamp: str | None, - setup_logging: "CountingLogHandler", - mocker, + mocker: pytest_mock.MockerFixture, request: pytest.FixtureRequest, visibility: typing.Literal["public", "tenant"] | list[str] | None, ) -> None: METRICS = {"a": 10, "b": 1.2} - setup_logging.captures = ["'a'", "resources/"] - # Have to create the run outside 
of fixtures because the resources dispatch # occurs immediately and is not captured by the handler when using the fixture run = sv_run.Run() run.config(suppress_errors=False) + metrics_spy = mocker.spy(Metrics, "new") + system_metrics_spy = mocker.spy(sv_run.Run, "_get_internal_metrics") + if visibility == "bad_option": with pytest.raises(SimvueRunError, match="visibility") as e: run.init( @@ -116,7 +135,7 @@ def test_log_metrics( retention_period="1 hour", visibility=visibility, ) - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) return run.init( @@ -129,7 +148,7 @@ def test_log_metrics( visibility=visibility, retention_period="1 hour", ) - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) # Speed up the read rate for this test run._dispatcher._max_buffer_size = 10 @@ -164,12 +183,14 @@ def test_log_metrics( assert len(_steps) == ( run._dispatcher._max_buffer_size * 3 if overload_buffer else 1 ) - # There are two debug log messages per metric dispatch - 'Executing callback on buffer' and 'Posting staged data' - # Should have done one dispatch if not overloaded, and 3 dispatches if overloaded - assert setup_logging.counts[0] == (6 if overload_buffer else 2) + + if overload_buffer: + assert metrics_spy.call_count > 2 + else: + assert metrics_spy.call_count <= 2 # Check heartbeat has been called at least once (so sysinfo sent) - assert setup_logging.counts[1] > 0 + assert system_metrics_spy.call_count >= 1 @pytest.mark.run @@ -907,7 +928,7 @@ def abort_callback(abort_run=trigger) -> None: mocker.patch("os._exit", testing_exit) N_PROCESSES: int = 3 - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run._heartbeat_interval = 1 run._testing = True run.add_process( @@ -922,7 +943,7 @@ def abort_callback(abort_run=trigger) -> None: client = sv_cl.Client() client.abort_run(run._id, reason="testing abort") time.sleep(4) - assert run._resources_metrics_interval == 1 + assert 
run._system_metrics_interval == 1 for child in child_processes: assert not child.is_running() if run._status != "terminated": @@ -933,30 +954,14 @@ def abort_callback(abort_run=trigger) -> None: @pytest.mark.run def test_abort_on_alert_python( - create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture + speedy_heartbeat, create_plain_run: typing.Tuple[sv_run.Run, dict], mocker: pytest_mock.MockerFixture ) -> None: - abort_set = threading.Event() - - def testing_exit(status: int) -> None: - abort_set.set() - raise SystemExit(status) - - mocker.patch("os._exit", testing_exit) + timeout: int = 20 + interval: int = 0 run, _ = create_plain_run - run.config(resources_metrics_interval=1) - run._heartbeat_interval = 1 client = sv_cl.Client() - i = 0 - - while True: - time.sleep(1) - if i == 4: - client.abort_run(run._id, reason="testing abort") - i += 1 - if abort_set.is_set() or i > 11: - break - - assert i < 10 + client.abort_run(run.id, reason="Test abort") + time.sleep(2) assert run._status == "terminated" @@ -966,7 +971,7 @@ def test_abort_on_alert_raise( ) -> None: run, _ = create_plain_run - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run._heartbeat_interval = 1 run._testing = True alert_id = run.create_user_alert("abort_test", trigger_abort=True) @@ -989,7 +994,7 @@ def test_abort_on_alert_raise( @pytest.mark.run def test_kill_all_processes(create_plain_run: typing.Tuple[sv_run.Run, dict]) -> None: run, _ = create_plain_run - run.config(resources_metrics_interval=1) + run.config(system_metrics_interval=1) run.add_process(identifier="forever_long_1", executable="bash", c="sleep 10000") run.add_process(identifier="forever_long_2", executable="bash", c="sleep 10000") processes = [ @@ -1019,9 +1024,11 @@ def test_run_created_with_no_timeout() -> None: @pytest.mark.parametrize("mode", ("online", "offline"), ids=("online", "offline")) @pytest.mark.run def test_reconnect(mode, monkeypatch: pytest.MonkeyPatch) 
-> None: + temp_d: tempfile.TemporaryDirectory | None = None + if mode == "offline": temp_d = tempfile.TemporaryDirectory() - monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d) + monkeypatch.setenv("SIMVUE_OFFLINE_DIRECTORY", temp_d.name) with simvue.Run(mode=mode) as run: run.init( @@ -1052,3 +1059,7 @@ def test_reconnect(mode, monkeypatch: pytest.MonkeyPatch) -> None: _reconnected_run = client.get_run(run_id) assert dict(_reconnected_run.metrics)["test_metric"]["last"] == 1 assert client.get_events(run_id)[0]["message"] == "Testing!" + + if temp_d: + temp_d.cleanup() + diff --git a/tests/unit/test_ecoclient.py b/tests/unit/test_ecoclient.py new file mode 100644 index 00000000..691a66fa --- /dev/null +++ b/tests/unit/test_ecoclient.py @@ -0,0 +1,84 @@ +import tempfile +import pytest +import time +import pytest_mock + +import simvue.eco.api_client as sv_eco_api +import simvue.eco.emissions_monitor as sv_eco_ems + +@pytest.mark.eco +def test_api_client_get_loc_info(mock_co2_signal) -> None: + _client = sv_eco_api.APIClient() + assert _client.latitude + assert _client.longitude + assert _client.country_code + + +@pytest.mark.eco +def test_api_client_query(mock_co2_signal: dict[str, dict | str]) -> None: + _client = sv_eco_api.APIClient() + _response: sv_eco_api.CO2SignalResponse = _client.get() + assert _response.carbon_intensity_units == mock_co2_signal["units"]["carbonIntensity"] + assert _response.country_code == mock_co2_signal["countryCode"] + assert _response.data.carbon_intensity == mock_co2_signal["data"]["carbonIntensity"] + assert _response.data.fossil_fuel_percentage == mock_co2_signal["data"]["fossilFuelPercentage"] + + +@pytest.mark.eco +@pytest.mark.parametrize( + "refresh", (True, False), ids=("refresh", "no-refresh") +) +def test_outdated_data_check( + mock_co2_signal, + refresh: bool, + mocker: pytest_mock.MockerFixture, + monkeypatch: pytest.MonkeyPatch +) -> None: + _spy = mocker.spy(sv_eco_api.APIClient, "get") + 
monkeypatch.setattr(sv_eco_ems, "CO2_SIGNAL_API_INTERVAL_LIMIT", 0.1) + with tempfile.TemporaryDirectory() as tempd: + _ems_monitor = sv_eco_ems.CO2Monitor( + thermal_design_power_per_cpu=80, + thermal_design_power_per_gpu=130, + local_data_directory=tempd, + intensity_refresh_interval=1 if refresh else 2, + co2_intensity=None, + co2_signal_api_token=None + ) + _measure_params = { + "process_id": "test_outdated_data_check", + "cpu_percent": 20, + "gpu_percent": 40, + "measure_interval": 1 + } + _ems_monitor.estimate_co2_emissions(**_measure_params) + time.sleep(3) + _ems_monitor.estimate_co2_emissions(**_measure_params) + + assert _spy.call_count == 2 if refresh else 1, f"{_spy.call_count} != {2 if refresh else 1}" + + +def test_co2_monitor_properties(mock_co2_signal) -> None: + with tempfile.TemporaryDirectory() as tempd: + _ems_monitor = sv_eco_ems.CO2Monitor( + thermal_design_power_per_cpu=80, + thermal_design_power_per_gpu=130, + local_data_directory=tempd, + intensity_refresh_interval=None, + co2_intensity=None, + co2_signal_api_token=None + ) + _measure_params = { + "process_id": "test_co2_monitor_properties", + "cpu_percent": 20, + "gpu_percent": 40, + "measure_interval": 1 + } + _ems_monitor.estimate_co2_emissions(**_measure_params) + assert _ems_monitor.current_carbon_intensity + assert _ems_monitor.process_data["test_co2_monitor_properties"] + assert _ems_monitor.total_power_usage + assert _ems_monitor.total_co2_emission + assert _ems_monitor.total_co2_delta + assert _ems_monitor.total_energy + assert _ems_monitor.total_energy_delta diff --git a/tests/unit/test_suppress_errors.py b/tests/unit/test_suppress_errors.py index 73c114dc..0ba7d022 100644 --- a/tests/unit/test_suppress_errors.py +++ b/tests/unit/test_suppress_errors.py @@ -12,7 +12,7 @@ def test_suppress_errors_false() -> None: with pytest.raises(RuntimeError) as e: run.config( suppress_errors=False, - disable_resources_metrics=123, + disable_system_metrics=123, ) assert "Input should be a valid 
boolean, unable to interpret input" in f"{e.value}" @@ -25,7 +25,7 @@ def test_suppress_errors_true(caplog) -> None: run.config(suppress_errors=True) run.config( - disable_resources_metrics=123, + disable_system_metrics=123, ) caplog.set_level(logging.ERROR) @@ -41,7 +41,7 @@ def test_suppress_errors_default(caplog) -> None: run.config(suppress_errors=True) run.config( - disable_resources_metrics=123, + disable_system_metrics=123, ) caplog.set_level(logging.ERROR)