diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac8eac82..d9ff2211 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,12 +7,17 @@ on: - 'integrated/**' - 'stl-preview-head/**' - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' jobs: lint: timeout-minutes: 10 name: lint runs-on: ${{ github.repository == 'stainless-sdks/openlayer-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 @@ -30,10 +35,10 @@ jobs: - name: Run lints run: ./scripts/lint - upload: - if: github.repository == 'stainless-sdks/openlayer-python' + build: + if: github.repository == 'stainless-sdks/openlayer-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) timeout-minutes: 10 - name: upload + name: build permissions: contents: read id-token: write @@ -41,6 +46,20 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run build + run: rye build + - name: Get GitHub OIDC Token id: github-oidc uses: actions/github-script@v6 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4540b55c..b6cfa03d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.2.0-alpha.64" + ".": "0.2.0-alpha.65" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ce0aeefd..788643cb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,37 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
+## 0.2.0-alpha.65 (2025-07-02) + +Full Changelog: [v0.2.0-alpha.64...v0.2.0-alpha.65](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.64...v0.2.0-alpha.65) + +### Features + +* **client:** add support for aiohttp ([977528d](https://github.com/openlayer-ai/openlayer-python/commit/977528d63ccc1d9c9ad534c2c84f490dcfd8fa2c)) + + +### Bug Fixes + +* **ci:** correct conditional ([f616411](https://github.com/openlayer-ai/openlayer-python/commit/f6164110ff27782f0df72c486d2c45c66f3a6cb5)) +* **ci:** release-doctor — report correct token name ([e42727c](https://github.com/openlayer-ai/openlayer-python/commit/e42727caf8c7ac350874d9195487da19df7f0081)) +* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([ab7ef6b](https://github.com/openlayer-ai/openlayer-python/commit/ab7ef6b12437afc6bc07b1839cdd5fb70d4c3628)) + + +### Chores + +* **ci:** change upload type ([49cdc9c](https://github.com/openlayer-ai/openlayer-python/commit/49cdc9c1c246051fcd78722eab8896fc3398a555)) +* **ci:** enable for pull requests ([07c86b5](https://github.com/openlayer-ai/openlayer-python/commit/07c86b5080d0c910e373b6f50b966ea56794e734)) +* **ci:** only run for pushes and fork pull requests ([fbf9c05](https://github.com/openlayer-ai/openlayer-python/commit/fbf9c05081172a447968c7c4ed011a364239ac7a)) +* **internal:** update conftest.py ([af83c82](https://github.com/openlayer-ai/openlayer-python/commit/af83c828c31f99537e8b57074a325d0ec8dec13e)) +* **readme:** update badges ([2c30786](https://github.com/openlayer-ai/openlayer-python/commit/2c30786b6870f003f4c6c2a9f68136eff15d2ebf)) +* **tests:** add tests for httpx client instantiation & proxies ([55a2e38](https://github.com/openlayer-ai/openlayer-python/commit/55a2e38b32dd755ac27b36c7b1ebffe0ef41d3f2)) +* **tests:** skip some failing tests on the latest python versions ([ef12a3a](https://github.com/openlayer-ai/openlayer-python/commit/ef12a3a6487d67e0add70f168a5954fb49c0f47b)) + + +### Documentation + +* **client:** fix httpx.Timeout documentation reference ([ad5d7c0](https://github.com/openlayer-ai/openlayer-python/commit/ad5d7c000f6ffb885d176192a98a740ff1251bd4)) + ## 0.2.0-alpha.64 (2025-06-16) Full Changelog: [v0.2.0-alpha.63...v0.2.0-alpha.64](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.63...v0.2.0-alpha.64) diff --git a/README.md b/README.md index 99cee3f6..3d3e3976 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Openlayer Python API library -[![PyPI version](https://img.shields.io/pypi/v/openlayer.svg)](https://pypi.org/project/openlayer/) +[![PyPI version]()](https://pypi.org/project/openlayer/) The Openlayer Python library provides convenient access to the Openlayer REST API from any Python 3.8+ application. The library includes type definitions for all request params and response fields, @@ -100,6 +100,56 @@ asyncio.run(main()) Functionality between the synchronous and asynchronous clients is otherwise identical. +### With aiohttp + +By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend. 
+ +You can enable this by installing `aiohttp`: + +```sh +# install from PyPI +pip install --pre openlayer[aiohttp] +``` + +Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: + +```python +import os +import asyncio +from openlayer import DefaultAioHttpClient +from openlayer import AsyncOpenlayer + + +async def main() -> None: + async with AsyncOpenlayer( + api_key=os.environ.get("OPENLAYER_API_KEY"), # This is the default and can be omitted + http_client=DefaultAioHttpClient(), + ) as client: + response = await client.inference_pipelines.data.stream( + inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + config={ + "input_variable_names": ["user_query"], + "output_column_name": "output", + "num_of_token_column_name": "tokens", + "cost_column_name": "cost", + "timestamp_column_name": "timestamp", + }, + rows=[ + { + "user_query": "what is the meaning of life?", + "output": "42", + "tokens": 7, + "cost": 0.02, + "timestamp": 1610000000, + } + ], + ) + print(response.success) + + +asyncio.run(main()) +``` + ## Using types Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: @@ -227,7 +277,7 @@ client.with_options(max_retries=5).inference_pipelines.data.stream( ### Timeouts By default requests time out after 1 minute. You can configure this with a `timeout` option, -which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object: +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: ```python from openlayer import Openlayer diff --git a/bin/check-release-environment b/bin/check-release-environment index c0077294..b845b0f4 100644 --- a/bin/check-release-environment +++ b/bin/check-release-environment @@ -3,7 +3,7 @@ errors=() if [ -z "${PYPI_TOKEN}" ]; then - errors+=("The OPENLAYER_PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") + errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") fi lenErrors=${#errors[@]} diff --git a/pyproject.toml b/pyproject.toml index 012fe716..1823b2c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "openlayer" -version = "0.2.0-alpha.64" +version = "0.2.0-alpha.65" description = "The official Python library for the openlayer API" dynamic = ["readme"] license = "Apache-2.0" @@ -43,6 +43,8 @@ classifiers = [ Homepage = "https://github.com/openlayer-ai/openlayer-python" Repository = "https://github.com/openlayer-ai/openlayer-python" +[project.optional-dependencies] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.6"] [tool.rye] managed = true diff --git a/requirements-dev.lock b/requirements-dev.lock index 0da348c5..6ad25b30 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -8,6 +8,13 @@ # with-sources: false -e file:. 
+aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.13 + # via httpx-aiohttp + # via openlayer +aiosignal==1.3.2 + # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 @@ -15,6 +22,10 @@ anyio==4.4.0 # via openlayer argcomplete==3.1.2 # via nox +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp certifi==2023.7.22 # via httpcore # via httpx @@ -35,17 +46,24 @@ execnet==2.1.1 # via pytest-xdist filelock==3.12.4 # via virtualenv +frozenlist==1.7.0 + # via aiohttp + # via aiosignal h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx httpx==0.28.1 + # via httpx-aiohttp # via openlayer # via respx +httpx-aiohttp==0.1.6 + # via openlayer idna==3.4 # via anyio # via httpx # via requests + # via yarl importlib-metadata==7.0.0 iniconfig==2.0.0 # via pytest @@ -53,6 +71,9 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py +multidict==6.5.0 + # via aiohttp + # via yarl mypy==1.14.1 mypy-extensions==1.0.0 # via mypy @@ -73,6 +94,9 @@ platformdirs==3.11.0 # via virtualenv pluggy==1.5.0 # via pytest +propcache==0.3.2 + # via aiohttp + # via yarl pyarrow==15.0.2 # via openlayer pydantic==2.10.3 @@ -117,6 +141,7 @@ tqdm==4.67.1 # via openlayer typing-extensions==4.12.2 # via anyio + # via multidict # via mypy # via openlayer # via pydantic @@ -128,5 +153,7 @@ urllib3==2.2.3 # via requests virtualenv==20.24.5 # via nox +yarl==1.20.1 + # via aiohttp zipp==3.17.0 # via importlib-metadata diff --git a/requirements.lock b/requirements.lock index 6e9ac537..43e5c7c4 100644 --- a/requirements.lock +++ b/requirements.lock @@ -8,11 +8,22 @@ # with-sources: false -e file:. +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.13 + # via httpx-aiohttp + # via openlayer +aiosignal==1.3.2 + # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 # via httpx # via openlayer +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp certifi==2023.7.22 # via httpcore # via httpx @@ -23,22 +34,35 @@ distro==1.8.0 # via openlayer exceptiongroup==1.2.2 # via anyio +frozenlist==1.7.0 + # via aiohttp + # via aiosignal h11==0.14.0 # via httpcore httpcore==1.0.2 # via httpx httpx==0.28.1 + # via httpx-aiohttp + # via openlayer +httpx-aiohttp==0.1.6 # via openlayer idna==3.4 # via anyio # via httpx # via requests + # via yarl +multidict==6.5.0 + # via aiohttp + # via yarl numpy==1.26.4 # via openlayer # via pandas # via pyarrow pandas==2.2.2 # via openlayer +propcache==0.3.2 + # via aiohttp + # via yarl pyarrow==15.0.2 # via openlayer pydantic==2.10.3 @@ -64,6 +88,7 @@ tqdm==4.67.1 # via openlayer typing-extensions==4.12.2 # via anyio + # via multidict # via openlayer # via pydantic # via pydantic-core @@ -71,3 +96,5 @@ tzdata==2024.1 # via pandas urllib3==2.2.3 # via requests +yarl==1.20.1 + # via aiohttp diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index e7a0c9ec..dbcbd064 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -1,7 +1,9 @@ #!/usr/bin/env bash set -exuo pipefail -RESPONSE=$(curl -X POST "$URL" \ +FILENAME=$(basename dist/*.whl) + +RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \ -H "Authorization: Bearer $AUTH" \ -H "Content-Type: application/json") @@ -12,13 +14,13 @@ if [[ "$SIGNED_URL" == "null" ]]; then exit 1 fi -UPLOAD_RESPONSE=$(tar -cz . 
| curl -v -X PUT \ - -H "Content-Type: application/gzip" \ - --data-binary @- "$SIGNED_URL" 2>&1) +UPLOAD_RESPONSE=$(curl -v -X PUT \ + -H "Content-Type: binary/octet-stream" \ + --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1) if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" - echo -e "\033[32mInstallation: pip install --pre 'https://pkg.stainless.com/s/openlayer-python/$SHA'\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/openlayer-python/$SHA/$FILENAME'\033[0m" else echo -e "\033[31mFailed to upload artifact.\033[0m" exit 1 diff --git a/src/openlayer/__init__.py b/src/openlayer/__init__.py index 8b434e24..78f0ca5d 100644 --- a/src/openlayer/__init__.py +++ b/src/openlayer/__init__.py @@ -36,7 +36,7 @@ UnprocessableEntityError, APIResponseValidationError, ) -from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient +from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient from ._utils._logs import setup_logging as _setup_logging __all__ = [ @@ -78,6 +78,7 @@ "DEFAULT_CONNECTION_LIMITS", "DefaultHttpxClient", "DefaultAsyncHttpxClient", + "DefaultAioHttpClient", ] if not _t.TYPE_CHECKING: diff --git a/src/openlayer/_base_client.py b/src/openlayer/_base_client.py index b8a466eb..e73f4f31 100644 --- a/src/openlayer/_base_client.py +++ b/src/openlayer/_base_client.py @@ -1289,6 +1289,24 @@ def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) +try: + import httpx_aiohttp +except ImportError: + + class _DefaultAioHttpClient(httpx.AsyncClient): + def __init__(self, **_kwargs: Any) -> None: + raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra") +else: + + class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + + super().__init__(**kwargs) + + if TYPE_CHECKING: DefaultAsyncHttpxClient = httpx.AsyncClient """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK @@ -1297,8 +1315,12 @@ def __init__(self, **kwargs: Any) -> None: This is useful because overriding the `http_client` with your own instance of `httpx.AsyncClient` will result in httpx's defaults being used, not ours. """ + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" else: DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): diff --git a/src/openlayer/_version.py b/src/openlayer/_version.py index c23fffab..11525dc8 100644 --- a/src/openlayer/_version.py +++ b/src/openlayer/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
__title__ = "openlayer" -__version__ = "0.2.0-alpha.64" # x-release-please-version +__version__ = "0.2.0-alpha.65" # x-release-please-version diff --git a/tests/api_resources/commits/test_test_results.py b/tests/api_resources/commits/test_test_results.py index 83853215..9cf0c5cb 100644 --- a/tests/api_resources/commits/test_test_results.py +++ b/tests/api_resources/commits/test_test_results.py @@ -69,7 +69,9 @@ def test_path_params_list(self, client: Openlayer) -> None: class TestAsyncTestResults: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_list(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/inference_pipelines/test_data.py b/tests/api_resources/inference_pipelines/test_data.py index 2ce79e42..7c29f492 100644 --- a/tests/api_resources/inference_pipelines/test_data.py +++ b/tests/api_resources/inference_pipelines/test_data.py @@ -132,7 +132,9 @@ def test_path_params_stream(self, client: Openlayer) -> None: class TestAsyncData: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_stream(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/inference_pipelines/test_rows.py b/tests/api_resources/inference_pipelines/test_rows.py index bef1c42f..1c3da6d1 100644 --- a/tests/api_resources/inference_pipelines/test_rows.py +++ b/tests/api_resources/inference_pipelines/test_rows.py @@ -81,7 +81,9 @@ def test_path_params_update(self, client: Openlayer) -> None: class TestAsyncRows: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_update(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/inference_pipelines/test_test_results.py b/tests/api_resources/inference_pipelines/test_test_results.py index 210aa423..00d9cf0c 100644 --- a/tests/api_resources/inference_pipelines/test_test_results.py +++ b/tests/api_resources/inference_pipelines/test_test_results.py @@ -68,7 +68,9 @@ def test_path_params_list(self, client: Openlayer) -> None: class TestAsyncTestResults: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_list(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/projects/test_commits.py b/tests/api_resources/projects/test_commits.py index 62fc86ca..210deb41 100644 --- a/tests/api_resources/projects/test_commits.py +++ b/tests/api_resources/projects/test_commits.py @@ -123,7 +123,9 @@ def test_path_params_list(self, client: Openlayer) -> None: class TestAsyncCommits: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = 
pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/projects/test_inference_pipelines.py b/tests/api_resources/projects/test_inference_pipelines.py index ea0bb5b6..e92bf727 100644 --- a/tests/api_resources/projects/test_inference_pipelines.py +++ b/tests/api_resources/projects/test_inference_pipelines.py @@ -137,7 +137,9 @@ def test_path_params_list(self, client: Openlayer) -> None: class TestAsyncInferencePipelines: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/projects/test_tests.py b/tests/api_resources/projects/test_tests.py index eaf8e170..a37a33ba 100644 --- a/tests/api_resources/projects/test_tests.py +++ b/tests/api_resources/projects/test_tests.py @@ -209,7 +209,9 @@ def test_path_params_list(self, client: Openlayer) -> None: class TestAsyncTests: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/storage/test_presigned_url.py b/tests/api_resources/storage/test_presigned_url.py index defedbfd..4f2daa29 100644 --- a/tests/api_resources/storage/test_presigned_url.py +++ b/tests/api_resources/storage/test_presigned_url.py @@ -50,7 +50,9 @@ def test_streaming_response_create(self, client: Openlayer) -> None: class TestAsyncPresignedURL: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/test_commits.py b/tests/api_resources/test_commits.py index 07a33f5f..7e786e08 100644 --- a/tests/api_resources/test_commits.py +++ b/tests/api_resources/test_commits.py @@ -57,7 +57,9 @@ def test_path_params_retrieve(self, client: Openlayer) -> None: class TestAsyncCommits: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_retrieve(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/test_inference_pipelines.py b/tests/api_resources/test_inference_pipelines.py index 9d9dba04..0fe9e9a2 100644 --- a/tests/api_resources/test_inference_pipelines.py +++ b/tests/api_resources/test_inference_pipelines.py @@ -154,7 +154,9 @@ def test_path_params_delete(self, client: Openlayer) -> None: class TestAsyncInferencePipelines: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + 
parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_retrieve(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/api_resources/test_projects.py b/tests/api_resources/test_projects.py index 8803ab34..d19f2de0 100644 --- a/tests/api_resources/test_projects.py +++ b/tests/api_resources/test_projects.py @@ -97,7 +97,9 @@ def test_streaming_response_list(self, client: Openlayer) -> None: class TestAsyncProjects: - parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) @parametrize async def test_method_create(self, async_client: AsyncOpenlayer) -> None: diff --git a/tests/conftest.py b/tests/conftest.py index 1e038ff9..b12c2a24 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,13 +1,17 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + from __future__ import annotations import os import logging from typing import TYPE_CHECKING, Iterator, AsyncIterator +import httpx import pytest from pytest_asyncio import is_async_test -from openlayer import Openlayer, AsyncOpenlayer +from openlayer import Openlayer, AsyncOpenlayer, DefaultAioHttpClient +from openlayer._utils import is_dict if TYPE_CHECKING: from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] @@ -25,6 +29,19 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: for async_test in pytest_asyncio_tests: async_test.add_marker(session_scope_marker, append=False) + # We skip tests that use both the aiohttp client and respx_mock as respx_mock + # doesn't support custom transports. 
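+    # (Assumed clarification, not in the original commit: respx patches httpx's built-in
+    # transports only, so requests made through the aiohttp transport would bypass the mock.)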
+ for item in items: + if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames: + continue + + if not hasattr(item, "callspec"): + continue + + async_client_param = item.callspec.params.get("async_client") + if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp": + item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock")) + base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -43,9 +60,25 @@ def client(request: FixtureRequest) -> Iterator[Openlayer]: @pytest.fixture(scope="session") async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncOpenlayer]: - strict = getattr(request, "param", True) - if not isinstance(strict, bool): - raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}") - - async with AsyncOpenlayer(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client: + param = getattr(request, "param", True) + + # defaults + strict = True + http_client: None | httpx.AsyncClient = None + + if isinstance(param, bool): + strict = param + elif is_dict(param): + strict = param.get("strict", True) + assert isinstance(strict, bool) + + http_client_type = param.get("http_client", "httpx") + if http_client_type == "aiohttp": + http_client = DefaultAioHttpClient() + else: + raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict") + + async with AsyncOpenlayer( + base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client + ) as client: yield client diff --git a/tests/test_client.py b/tests/test_client.py index 7562a048..24766be2 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -23,17 +23,16 @@ from openlayer import Openlayer, AsyncOpenlayer, APIResponseValidationError from openlayer._types import Omit -from openlayer._utils import maybe_transform from openlayer._models import BaseModel, FinalRequestOptions -from openlayer._constants import RAW_RESPONSE_HEADER from openlayer._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError from openlayer._base_client import ( DEFAULT_TIMEOUT, HTTPX_DEFAULT_TIMEOUT, BaseClient, + DefaultHttpxClient, + DefaultAsyncHttpxClient, make_request_options, ) -from openlayer.types.inference_pipelines.data_stream_params import DataStreamParams from .utils import update_env @@ -192,6 +191,7 @@ def test_copy_signature(self) -> None: copy_param = copy_signature.parameters.get(name) assert copy_param is not None, f"copy() signature is missing the {name} param" + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") def test_copy_build_request(self) -> None: options = FinalRequestOptions(method="get", url="/foo") @@ -722,82 +722,49 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str @mock.patch("openlayer._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Openlayer) -> None: respx_mock.post("/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream").mock( side_effect=httpx.TimeoutException("Test timeout error") ) with pytest.raises(APITimeoutError): - self.client.post( - "/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream", - body=cast( - 
object, - maybe_transform( - dict( - config={ - "input_variable_names": ["user_query"], - "output_column_name": "output", - "num_of_token_column_name": "tokens", - "cost_column_name": "cost", - "timestamp_column_name": "timestamp", - }, - rows=[ - { - "user_query": "what is the meaning of life?", - "output": "42", - "tokens": 7, - "cost": 0.02, - "timestamp": 1610000000, - } - ], - ), - DataStreamParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) + client.inference_pipelines.data.with_streaming_response.stream( + inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + config={"output_column_name": "output"}, + rows=[ + { + "user_query": "bar", + "output": "bar", + "tokens": "bar", + "cost": "bar", + "timestamp": "bar", + } + ], + ).__enter__() assert _get_open_connections(self.client) == 0 @mock.patch("openlayer._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Openlayer) -> None: respx_mock.post("/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream").mock( return_value=httpx.Response(500) ) with pytest.raises(APIStatusError): - self.client.post( - "/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream", - body=cast( - object, - maybe_transform( - dict( - config={ - "input_variable_names": ["user_query"], - "output_column_name": "output", - "num_of_token_column_name": "tokens", - "cost_column_name": "cost", - "timestamp_column_name": "timestamp", - }, - rows=[ - { - "user_query": "what is the meaning of life?", - "output": "42", - "tokens": 7, - "cost": 0.02, - "timestamp": 1610000000, - } - ], - ), - DataStreamParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + client.inference_pipelines.data.with_streaming_response.stream( + inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + config={"output_column_name": "output"}, + rows=[ + { + "user_query": "bar", + "output": "bar", + "tokens": "bar", + "cost": "bar", + "timestamp": "bar", + } + ], + ).__enter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -921,6 +888,28 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert response.http_request.headers.get("x-stainless-retry-count") == "42" + def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + @pytest.mark.respx(base_url=base_url) def test_follow_redirects(self, respx_mock: MockRouter) -> None: # Test that the default follow_redirects=True allows following redirects @@ -1084,6 +1073,7 @@ def test_copy_signature(self) -> None: copy_param = 
copy_signature.parameters.get(name) assert copy_param is not None, f"copy() signature is missing the {name} param" + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") def test_copy_build_request(self) -> None: options = FinalRequestOptions(method="get", url="/foo") @@ -1628,82 +1618,53 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte @mock.patch("openlayer._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_timeout_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncOpenlayer + ) -> None: respx_mock.post("/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream").mock( side_effect=httpx.TimeoutException("Test timeout error") ) with pytest.raises(APITimeoutError): - await self.client.post( - "/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream", - body=cast( - object, - maybe_transform( - dict( - config={ - "input_variable_names": ["user_query"], - "output_column_name": "output", - "num_of_token_column_name": "tokens", - "cost_column_name": "cost", - "timestamp_column_name": "timestamp", - }, - rows=[ - { - "user_query": "what is the meaning of life?", - "output": "42", - "tokens": 7, - "cost": 0.02, - "timestamp": 1610000000, - } - ], - ), - DataStreamParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) + await async_client.inference_pipelines.data.with_streaming_response.stream( + inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + config={"output_column_name": "output"}, + rows=[ + { + "user_query": "bar", + "output": "bar", + "tokens": "bar", + "cost": "bar", + "timestamp": "bar", + } + ], + ).__aenter__() assert _get_open_connections(self.client) == 0 @mock.patch("openlayer._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter) -> None: + async def test_retrying_status_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncOpenlayer + ) -> None: respx_mock.post("/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream").mock( return_value=httpx.Response(500) ) with pytest.raises(APIStatusError): - await self.client.post( - "/inference-pipelines/182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e/data-stream", - body=cast( - object, - maybe_transform( - dict( - config={ - "input_variable_names": ["user_query"], - "output_column_name": "output", - "num_of_token_column_name": "tokens", - "cost_column_name": "cost", - "timestamp_column_name": "timestamp", - }, - rows=[ - { - "user_query": "what is the meaning of life?", - "output": "42", - "tokens": 7, - "cost": 0.02, - "timestamp": 1610000000, - } - ], - ), - DataStreamParams, - ), - ), - cast_to=httpx.Response, - options={"headers": {RAW_RESPONSE_HEADER: "stream"}}, - ) - + await async_client.inference_pipelines.data.with_streaming_response.stream( + inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", + config={"output_column_name": "output"}, + rows=[ + { + "user_query": "bar", + "output": "bar", + "tokens": "bar", + "cost": "bar", + "timestamp": "bar", + } + ], + ).__aenter__() assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) @@ -1875,6 
+1836,28 @@ async def test_main() -> None: time.sleep(0.1) + async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultAsyncHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + async def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultAsyncHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + @pytest.mark.respx(base_url=base_url) async def test_follow_redirects(self, respx_mock: MockRouter) -> None: # Test that the default follow_redirects=True allows following redirects
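
A note on the expanded test matrix above: the new `aiohttp` parametrization id makes it easy to run just those cases locally. The commands below are a minimal sketch, not part of this diff, assuming the optional extra is synced with Rye (mirroring the new CI step) and a test API server is reachable at `TEST_API_BASE_URL` (which `tests/conftest.py` defaults to `http://127.0.0.1:4010`):

```sh
# Install the project with the optional aiohttp extra, as the new CI "build" job does
rye sync --all-features

# Select only the tests parametrized with the new "aiohttp" id
rye run pytest tests/ -k aiohttp
```

Cases that combine the aiohttp-backed client with `respx_mock` are skipped automatically by the new collection hook added in `tests/conftest.py`.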