Commit 4bffe1e

update openai samples to use agents (#2012)
1 parent 406a856 commit 4bffe1e

4 files changed, +40 -47 lines changed

python/samples/getting_started/agents/azure_openai/azure_responses_client_with_local_mcp.py

Lines changed: 4 additions & 6 deletions
@@ -1,7 +1,7 @@
 # Copyright (c) Microsoft. All rights reserved.
 
-import os
 import asyncio
+import os
 
 from agent_framework import ChatAgent, MCPStreamableHTTPTool
 from agent_framework.azure import AzureOpenAIResponsesClient
@@ -18,13 +18,14 @@
 # --- Below code uses Microsoft Learn MCP server over Streamable HTTP ---
 # --- Users can set these environment variables, or just edit the values below to their desired local MCP server
 MCP_NAME = os.environ.get("MCP_NAME", "Microsoft Learn MCP")  # example name
-MCP_URL = os.environ.get("MCP_URL", "https://learn.microsoft.com/api/mcp")  # example endpoint
+MCP_URL = os.environ.get("MCP_URL", "https://learn.microsoft.com/api/mcp")  # example endpoint
 
 # Environment variables for Azure OpenAI Responses authentication
 # AZURE_OPENAI_ENDPOINT="<your-azure openai-endpoint>"
 # AZURE_OPENAI_RESPONSES_DEPLOYMENT_NAME="<your-deployment-name>"
 # AZURE_OPENAI_API_VERSION="<your-api-version>"  # e.g. "2025-03-01-preview"
 
+
 async def main():
     """Example showing local MCP tools for a Azure OpenAI Responses Agent."""
     # AuthN: use Azure CLI
@@ -38,16 +39,13 @@ async def main():
 
     agent: ChatAgent = responses_client.create_agent(
         name="DocsAgent",
-        instructions=(
-            "You are a helpful assistant that can help with Microsoft documentation questions."
-        ),
+        instructions=("You are a helpful assistant that can help with Microsoft documentation questions."),
     )
 
     # Connect to the MCP server (Streamable HTTP)
     async with MCPStreamableHTTPTool(
         name=MCP_NAME,
         url=MCP_URL,
-
     ) as mcp_tool:
         # First query — expect the agent to use the MCP tool if it helps
         q1 = "How to create an Azure storage account using az cli?"
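For orientation, a condensed sketch of the flow this sample follows after the change, assembled from the hunks above. The client construction with AzureCliCredential and the way mcp_tool is handed to the run call sit outside the changed hunks, so those parts are assumptions, not the sample's exact code:

import asyncio
import os

from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIResponsesClient
from azure.identity import AzureCliCredential  # assumed; the diff only shows the "# AuthN: use Azure CLI" comment

MCP_NAME = os.environ.get("MCP_NAME", "Microsoft Learn MCP")
MCP_URL = os.environ.get("MCP_URL", "https://learn.microsoft.com/api/mcp")


async def main() -> None:
    # Assumed constructor arguments; not visible in the changed hunks.
    responses_client = AzureOpenAIResponsesClient(credential=AzureCliCredential())

    agent: ChatAgent = responses_client.create_agent(
        name="DocsAgent",
        instructions="You are a helpful assistant that can help with Microsoft documentation questions.",
    )

    # The MCP tool is opened as an async context manager; passing it to run()
    # per query is an assumption, since the run call is outside the hunks.
    async with MCPStreamableHTTPTool(name=MCP_NAME, url=MCP_URL) as mcp_tool:
        result = await agent.run("How to create an Azure storage account using az cli?", tools=[mcp_tool])
        print(f"Assistant: {result}")


if __name__ == "__main__":
    asyncio.run(main())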

python/samples/getting_started/agents/openai/openai_chat_client_with_web_search.py

Lines changed: 13 additions & 15 deletions
@@ -2,7 +2,7 @@
 
 import asyncio
 
-from agent_framework import HostedWebSearchTool
+from agent_framework import ChatAgent, HostedWebSearchTool
 from agent_framework.openai import OpenAIChatClient
 
 """
@@ -14,34 +14,32 @@
 
 
 async def main() -> None:
-    client = OpenAIChatClient(model_id="gpt-4o-search-preview")
-
-    message = "What is the current weather? Do not ask for my current location."
-    # Test that the client will use the web search tool with location
+    # Test that the agent will use the web search tool with location
     additional_properties = {
         "user_location": {
             "country": "US",
             "city": "Seattle",
         }
     }
+
+    agent = ChatAgent(
+        chat_client=OpenAIChatClient(model_id="gpt-4o-search-preview"),
+        instructions="You are a helpful assistant that can search the web for current information.",
+        tools=[HostedWebSearchTool(additional_properties=additional_properties)],
+    )
+
+    message = "What is the current weather? Do not ask for my current location."
     stream = False
     print(f"User: {message}")
+
     if stream:
         print("Assistant: ", end="")
-        async for chunk in client.get_streaming_response(
-            message,
-            tools=[HostedWebSearchTool(additional_properties=additional_properties)],
-            tool_choice="auto",
-        ):
+        async for chunk in agent.run_stream(message):
             if chunk.text:
                 print(chunk.text, end="")
             print("")
     else:
-        response = await client.get_response(
-            message,
-            tools=[HostedWebSearchTool(additional_properties=additional_properties)],
-            tool_choice="auto",
-        )
+        response = await agent.run(message)
         print(f"Assistant: {response}")
 
 
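Read end to end, the added lines amount to the following non-streaming flow. This is a minimal sketch built only from the hunks above; the asyncio entrypoint is assumed boilerplate that the diff does not show:

import asyncio

from agent_framework import ChatAgent, HostedWebSearchTool
from agent_framework.openai import OpenAIChatClient


async def main() -> None:
    # The web search location hint travels through the hosted tool's additional properties.
    additional_properties = {"user_location": {"country": "US", "city": "Seattle"}}

    # Tools now live on the agent instead of being passed to every client call.
    agent = ChatAgent(
        chat_client=OpenAIChatClient(model_id="gpt-4o-search-preview"),
        instructions="You are a helpful assistant that can search the web for current information.",
        tools=[HostedWebSearchTool(additional_properties=additional_properties)],
    )

    response = await agent.run("What is the current weather? Do not ask for my current location.")
    print(f"Assistant: {response}")


if __name__ == "__main__":
    asyncio.run(main())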

python/samples/getting_started/agents/openai/openai_responses_client_with_file_search.py

Lines changed: 10 additions & 11 deletions
@@ -2,7 +2,7 @@
 
 import asyncio
 
-from agent_framework import HostedFileSearchTool, HostedVectorStoreContent
+from agent_framework import ChatAgent, HostedFileSearchTool, HostedVectorStoreContent
 from agent_framework.openai import OpenAIResponsesClient
 
 """
@@ -46,22 +46,21 @@ async def main() -> None:
     stream = False
     print(f"User: {message}")
     file_id, vector_store = await create_vector_store(client)
+
+    agent = ChatAgent(
+        chat_client=client,
+        instructions="You are a helpful assistant that can search through files to find information.",
+        tools=[HostedFileSearchTool(inputs=vector_store)],
+    )
+
     if stream:
         print("Assistant: ", end="")
-        async for chunk in client.get_streaming_response(
-            message,
-            tools=[HostedFileSearchTool(inputs=vector_store)],
-            tool_choice="auto",
-        ):
+        async for chunk in agent.run_stream(message):
             if chunk.text:
                 print(chunk.text, end="")
             print("")
     else:
-        response = await client.get_response(
-            message,
-            tools=[HostedFileSearchTool(inputs=vector_store)],
-            tool_choice="auto",
-        )
+        response = await agent.run(message)
         print(f"Assistant: {response}")
     await delete_vector_store(client, file_id, vector_store.vector_store_id)
 
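The agent-side change can be isolated into a small helper. The helper name and its parameter list below are illustration only; vector-store creation and cleanup stay in the sample's existing create_vector_store and delete_vector_store helpers, which this diff does not touch:

from agent_framework import ChatAgent, HostedFileSearchTool, HostedVectorStoreContent
from agent_framework.openai import OpenAIResponsesClient


async def ask_with_file_search(
    client: OpenAIResponsesClient,
    vector_store: HostedVectorStoreContent,
    message: str,
) -> None:
    # Hypothetical helper mirroring the added lines above: the vector store is wired
    # in through HostedFileSearchTool when the agent is constructed, not per call.
    agent = ChatAgent(
        chat_client=client,
        instructions="You are a helpful assistant that can search through files to find information.",
        tools=[HostedFileSearchTool(inputs=vector_store)],
    )
    response = await agent.run(message)
    print(f"Assistant: {response}")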

python/samples/getting_started/agents/openai/openai_responses_client_with_web_search.py

Lines changed: 13 additions & 15 deletions
@@ -2,7 +2,7 @@
 
 import asyncio
 
-from agent_framework import HostedWebSearchTool
+from agent_framework import ChatAgent, HostedWebSearchTool
 from agent_framework.openai import OpenAIResponsesClient
 
 """
@@ -14,34 +14,32 @@
 
 
 async def main() -> None:
-    client = OpenAIResponsesClient()
-
-    message = "What is the current weather? Do not ask for my current location."
-    # Test that the client will use the web search tool with location
+    # Test that the agent will use the web search tool with location
    additional_properties = {
        "user_location": {
            "country": "US",
            "city": "Seattle",
        }
    }
+
+    agent = ChatAgent(
+        chat_client=OpenAIResponsesClient(),
+        instructions="You are a helpful assistant that can search the web for current information.",
+        tools=[HostedWebSearchTool(additional_properties=additional_properties)],
+    )
+
+    message = "What is the current weather? Do not ask for my current location."
     stream = False
     print(f"User: {message}")
+
     if stream:
         print("Assistant: ", end="")
-        async for chunk in client.get_streaming_response(
-            message,
-            tools=[HostedWebSearchTool(additional_properties=additional_properties)],
-            tool_choice="auto",
-        ):
+        async for chunk in agent.run_stream(message):
             if chunk.text:
                 print(chunk.text, end="")
             print("")
     else:
-        response = await client.get_response(
-            message,
-            tools=[HostedWebSearchTool(additional_properties=additional_properties)],
-            tool_choice="auto",
-        )
+        response = await agent.run(message)
         print(f"Assistant: {response}")
 
 
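This file mirrors the chat-client web-search sample above; the structural differences are the client type and the absence of a model_id. The sketch below shows the streaming branch from the diff, again assuming only standard asyncio entrypoint boilerplate:

import asyncio

from agent_framework import ChatAgent, HostedWebSearchTool
from agent_framework.openai import OpenAIResponsesClient


async def main() -> None:
    agent = ChatAgent(
        chat_client=OpenAIResponsesClient(),
        instructions="You are a helpful assistant that can search the web for current information.",
        tools=[HostedWebSearchTool(additional_properties={"user_location": {"country": "US", "city": "Seattle"}})],
    )

    # Streaming branch from the diff: chunks expose partial text as they arrive.
    print("Assistant: ", end="")
    async for chunk in agent.run_stream("What is the current weather? Do not ask for my current location."):
        if chunk.text:
            print(chunk.text, end="")
    print("")


if __name__ == "__main__":
    asyncio.run(main())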
