Skip to content

Commit 332996b

Browse files
Sukitlybaskaryan and Bagatur authored
openai[patch]: fix ChatOpenAI model's openai proxy (langchain-ai#19559)
Due to changes in the OpenAI SDK, the previous method of setting the OpenAI proxy in ChatOpenAI no longer works. This PR fixes this issue, making the previous way of setting the OpenAI proxy in ChatOpenAI effective again. --------- Co-authored-by: Bagatur <[email protected]>
1 parent b15c7fd commit 332996b

File tree

2 files changed

+41
-0
lines changed
  • libs/partners/openai

2 files changed

+41
-0
lines changed

libs/partners/openai/langchain_openai/chat_models/base.py

+19
Original file line number | Diff line number | Diff line change
@@ -380,12 +380,31 @@ def validate_environment(cls, values: Dict) -> Dict:
380380
"default_query": values["default_query"],
381381
}
382382

383+
openai_proxy = values["openai_proxy"]
383384
if not values.get("client"):
385+
if openai_proxy and not values["http_client"]:
386+
try:
387+
import httpx
388+
except ImportError as e:
389+
raise ImportError(
390+
"Could not import httpx python package. "
391+
"Please install it with `pip install httpx`."
392+
) from e
393+
values["http_client"] = httpx.Client(proxy=openai_proxy)
384394
sync_specific = {"http_client": values["http_client"]}
385395
values["client"] = openai.OpenAI(
386396
**client_params, **sync_specific
387397
).chat.completions
388398
if not values.get("async_client"):
399+
if openai_proxy and not values["http_async_client"]:
400+
try:
401+
import httpx
402+
except ImportError as e:
403+
raise ImportError(
404+
"Could not import httpx python package. "
405+
"Please install it with `pip install httpx`."
406+
) from e
407+
values["http_async_client"] = httpx.AsyncClient(proxy=openai_proxy)
389408
async_specific = {"http_client": values["http_async_client"]}
390409
values["async_client"] = openai.AsyncOpenAI(
391410
**client_params, **async_specific

libs/partners/openai/tests/integration_tests/chat_models/test_base.py

+22
Original file line number | Diff line number | Diff line change
@@ -501,3 +501,25 @@ class MyModel(BaseModel):
501501
assert isinstance(result, MyModel)
502502
assert result.name == "Erick"
503503
assert result.age == 27
504+
505+
506+
def test_openai_proxy() -> None:
    """Test ChatOpenAI with proxy.

    Verifies that passing ``openai_proxy`` wires an httpx proxy mount into
    both the sync and async underlying clients.
    """
    chat_openai = ChatOpenAI(
        openai_proxy="http://localhost:8080",
    )

    def _check_proxy_mounts(mounts: dict) -> None:
        # Exactly one mount is expected: the single configured proxy route.
        assert len(mounts) == 1
        for transport in mounts.values():
            # NOTE(review): reaches into httpx/httpcore private attributes
            # (`_pool._proxy_url`) — brittle across httpx versions.
            origin = transport._pool._proxy_url.origin
            assert origin.scheme == b"http"
            assert origin.host == b"localhost"
            assert origin.port == 8080

    # Sync client: unwrap the openai SDK wrapper down to the httpx.Client.
    _check_proxy_mounts(chat_openai.client._client._client._mounts)
    # Async client: same unwrapping, down to the httpx.AsyncClient.
    _check_proxy_mounts(chat_openai.async_client._client._client._mounts)

0 commit comments

Comments
 (0)