File tree: 2 files changed, +41 -0 lines changed
  langchain_openai/chat_models
  tests/integration_tests/chat_models

langchain_openai/chat_models
@@ -380,12 +380,31 @@ def validate_environment(cls, values: Dict) -> Dict:
             "default_query": values["default_query"],
         }
 
+        openai_proxy = values["openai_proxy"]
         if not values.get("client"):
+            if openai_proxy and not values["http_client"]:
+                try:
+                    import httpx
+                except ImportError as e:
+                    raise ImportError(
+                        "Could not import httpx python package. "
+                        "Please install it with `pip install httpx`."
+                    ) from e
+                values["http_client"] = httpx.Client(proxy=openai_proxy)
             sync_specific = {"http_client": values["http_client"]}
             values["client"] = openai.OpenAI(
                 **client_params, **sync_specific
             ).chat.completions
         if not values.get("async_client"):
+            if openai_proxy and not values["http_async_client"]:
+                try:
+                    import httpx
+                except ImportError as e:
+                    raise ImportError(
+                        "Could not import httpx python package. "
+                        "Please install it with `pip install httpx`."
+                    ) from e
+                values["http_async_client"] = httpx.AsyncClient(proxy=openai_proxy)
             async_specific = {"http_client": values["http_async_client"]}
             values["async_client"] = openai.AsyncOpenAI(
                 **client_params, **async_specific
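A minimal usage sketch of the behavior this hunk adds (not part of the diff; the proxy URL is a placeholder and OPENAI_API_KEY is assumed to be set in the environment):

# Usage sketch, assuming langchain_openai with this change and httpx installed.
from langchain_openai import ChatOpenAI
import httpx

# Setting openai_proxy alone is now enough: the sync and async httpx clients
# are created with proxy=<openai_proxy> when no http_client is supplied.
chat = ChatOpenAI(openai_proxy="http://localhost:8080")

# An explicitly supplied http_client still takes precedence, since the proxied
# client is only built when no http_client was passed in.
chat_explicit = ChatOpenAI(http_client=httpx.Client(proxy="http://localhost:8080"))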
tests/integration_tests/chat_models
@@ -501,3 +501,25 @@ class MyModel(BaseModel):
     assert isinstance(result, MyModel)
     assert result.name == "Erick"
     assert result.age == 27
+
+
+def test_openai_proxy() -> None:
+    """Test ChatOpenAI with proxy."""
+    chat_openai = ChatOpenAI(
+        openai_proxy="http://localhost:8080",
+    )
+    mounts = chat_openai.client._client._client._mounts
+    assert len(mounts) == 1
+    for key, value in mounts.items():
+        proxy = value._pool._proxy_url.origin
+        assert proxy.scheme == b"http"
+        assert proxy.host == b"localhost"
+        assert proxy.port == 8080
+
+    async_client_mounts = chat_openai.async_client._client._client._mounts
+    assert len(async_client_mounts) == 1
+    for key, value in async_client_mounts.items():
+        proxy = value._pool._proxy_url.origin
+        assert proxy.scheme == b"http"
+        assert proxy.host == b"localhost"
+        assert proxy.port == 8080
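A note on the chained private attributes the test walks (internals of the openai and httpx packages; names may change between versions):

# Rough traversal mirroring what the test inspects; not part of the diff.
completions = chat_openai.client      # openai SDK resource (chat.completions)
openai_client = completions._client   # openai.OpenAI instance
httpx_client = openai_client._client  # underlying httpx.Client built with proxy=openai_proxy
print(httpx_client._mounts)           # likely {URLPattern('all://'): proxied transport}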