2 changes: 1 addition & 1 deletion python/pyproject.toml
@@ -37,7 +37,7 @@ dependencies = [
"numpy >= 1.25.0; python_version < '3.12'",
"numpy >= 1.26.0; python_version >= '3.12'",
# openai connector
"openai >= 1.98.0, < 2.0.0",
"openai >= 1.98.0,<2",
# openapi and swagger
"openapi_core >= 0.18,<0.20",
"websockets >= 13, < 16",
72 changes: 72 additions & 0 deletions python/samples/concepts/functions/agent_framework_tools.py
@@ -0,0 +1,72 @@
# Copyright (c) Microsoft. All rights reserved.

import asyncio

from agent_framework.openai import OpenAIResponsesClient

from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion, OpenAIChatPromptExecutionSettings
from semantic_kernel.core_plugins import TimePlugin
from semantic_kernel.functions import KernelFunctionFromPrompt
from semantic_kernel.prompt_template import KernelPromptTemplate, PromptTemplateConfig

"""
This example demonstrates how to create an agent framework tool from a kernel function
that uses a prompt template with plugin functions. The tool is then used by an Agent
Framework Agent to answer a question about the current time and date.


This sample requires manually installing the `agent-framework-core` package.

```bash
pip install agent-framework-core --pre
```
or with uv:
```bash
uv pip install agent-framework-core --prerelease=allow
```
"""


async def main():
    kernel = Kernel()

    # Register the OpenAI chat service used to execute the prompt function.
    service_id = "template_language"
    kernel.add_service(
        OpenAIChatCompletion(service_id=service_id),
    )

    # The TimePlugin provides the {{time.date}} and {{time.time}} template functions.
    kernel.add_plugin(TimePlugin(), "time")

    function_definition = """
    Today is: {{time.date}}
    Current time is: {{time.time}}

    Answer the following questions using JSON syntax, including the data used.
    Is it morning, afternoon, evening, or night (morning/afternoon/evening/night)?
    Is it weekend time (weekend/not weekend)?
    """

    print("--- Rendered Prompt ---")
    prompt_template_config = PromptTemplateConfig(template=function_definition)
    prompt_template = KernelPromptTemplate(prompt_template_config=prompt_template_config)
    rendered_prompt = await prompt_template.render(kernel, arguments=None)
    print(rendered_prompt)

    # Build the prompt function and convert it into an Agent Framework tool.
    function = KernelFunctionFromPrompt(
        description="Determine the kind of day based on the current time and date.",
        plugin_name="TimePlugin",
        prompt_execution_settings=OpenAIChatPromptExecutionSettings(service_id=service_id, max_tokens=100),
        function_name="kind_of_day",
        prompt_template=prompt_template,
    ).as_agent_framework_tool(kernel=kernel)

    print("--- Prompt Function Result ---")
    # Let an Agent Framework agent call the converted tool to answer the question.
    response = await (
        OpenAIResponsesClient(model_id="gpt-5-nano").create_agent(tools=function).run("What kind of day is it?")
    )
    print(response.text)


if __name__ == "__main__":
    asyncio.run(main())
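For comparison, here is a minimal sketch (not part of this PR) of invoking the same prompt function directly through Semantic Kernel, without converting it to an Agent Framework tool; it assumes the `kernel`, `prompt_template`, and `service_id` objects from `main()` above and runs inside an async function:

```python
# Illustrative only; reuses kernel, prompt_template, and service_id from main() above.
kind_of_day = KernelFunctionFromPrompt(
    function_name="kind_of_day",
    plugin_name="TimePlugin",
    description="Determine the kind of day based on the current time and date.",
    prompt_template=prompt_template,
    prompt_execution_settings=OpenAIChatPromptExecutionSettings(service_id=service_id, max_tokens=100),
)
# Invoke through the kernel directly instead of via an Agent Framework agent.
result = await kernel.invoke(kind_of_day)
print(result)
```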
@@ -150,7 +150,7 @@ async def _send_image_edit_request(
        try:
            response: ImagesResponse = await self.client.images.edit(
                image=image,
-               mask=mask,
+               mask=mask,  # type: ignore
                **settings.prepare_settings_dict(),
            )
            self.store_usage(response)
78 changes: 77 additions & 1 deletion python/semantic_kernel/functions/kernel_function.py
@@ -11,7 +11,7 @@

from opentelemetry import metrics, trace
from opentelemetry.semconv.attributes.error_attributes import ERROR_TYPE
-from pydantic import Field
+from pydantic import BaseModel, Field

from semantic_kernel.filters.filter_types import FilterTypes
from semantic_kernel.filters.functions.function_invocation_context import FunctionInvocationContext
@@ -35,6 +35,9 @@
from semantic_kernel.utils.telemetry.model_diagnostics import function_tracer
from semantic_kernel.utils.telemetry.model_diagnostics.gen_ai_attributes import TOOL_CALL_ARGUMENTS, TOOL_CALL_RESULT

from ..contents.chat_message_content import ChatMessageContent
from ..contents.text_content import TextContent

if TYPE_CHECKING:
    from semantic_kernel.connectors.ai.prompt_execution_settings import PromptExecutionSettings
    from semantic_kernel.contents.streaming_content_mixin import StreamingContentMixin
@@ -405,3 +408,76 @@ def _handle_exception(self, current_span: trace.Span, exception: Exception, attr
        current_span.set_status(trace.StatusCode.ERROR, description=str(exception))

        KernelFunctionLogMessages.log_function_error(logger, exception)

    def as_agent_framework_tool(
        self,
        *,
        name: str | None = None,
        description: str | None = None,
        kernel: "Kernel | None" = None,
    ) -> Any:
        """Convert the function to an agent framework tool.

        Args:
            name: The name of the tool; if None, the function name is used.
            description: The description of the tool; if None, the function description is used.
            kernel: The kernel to use; if None, a new kernel is created.

        Returns:
            AIFunction: The agent framework tool.
        """
        import json

        from pydantic import Field, create_model

        from semantic_kernel.kernel import Kernel

        # agent_framework is an optional dependency, so import it lazily.
        try:
            from agent_framework import AIFunction
        except ImportError as e:
            raise ImportError(
                "agent_framework is not installed. Please install it with 'pip install agent-framework-core'"
            ) from e

        if not kernel:
            kernel = Kernel()
        name = name or self.name
        description = description or self.description

        # Build a pydantic input model from the function's parameters.
        fields = {}
        for param in self.parameters:
            if param.include_in_function_choices:
                if param.default_value is not None:
                    fields[param.name] = (
                        param.type_,
                        Field(description=param.description, default=param.default_value),
                    )
                    continue
                fields[param.name] = (param.type_, Field(description=param.description))
        input_model = create_model("InputModel", **fields)  # type: ignore

        # Bridge the kernel function to a plain async callable that returns JSON text.
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            result = await self.invoke(kernel, *args, **kwargs)
            if result and result.value is not None:
                if isinstance(result.value, list):
                    results: list[Any] = []
                    for value in result.value:
                        if isinstance(value, ChatMessageContent):
                            results.append(str(value))
                            continue
                        if isinstance(value, TextContent):
                            results.append(value.text)
                            continue
                        if isinstance(value, BaseModel):
                            results.append(value.model_dump())
                            continue
                        results.append(value)
                    return json.dumps(results) if len(results) > 1 else json.dumps(results[0])
                return json.dumps(result.value)
            return "The function did not return a result."

        return AIFunction(
            name=name,
            description=description,
            input_model=input_model,
            func=wrapper,
        )
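As a usage note, here is a minimal sketch of converting a native (decorated) kernel function with the new method; the `WeatherPlugin` below is hypothetical and not part of this change, and the sketch assumes `agent-framework-core` is installed:

```python
# Hypothetical example; WeatherPlugin is illustrative only and not part of this PR.
import asyncio

from semantic_kernel import Kernel
from semantic_kernel.functions import kernel_function


class WeatherPlugin:
    @kernel_function(name="get_weather", description="Get the weather for a city.")
    def get_weather(self, city: str) -> str:
        # Stub implementation; a real plugin would call a weather service.
        return f"It is sunny in {city}."


async def main() -> None:
    kernel = Kernel()
    plugin = kernel.add_plugin(WeatherPlugin(), "weather")
    # as_agent_framework_tool returns an agent_framework AIFunction that can be
    # passed to an Agent Framework agent via its `tools` parameter, as the sample
    # above does with OpenAIResponsesClient.create_agent.
    tool = plugin["get_weather"].as_agent_framework_tool(kernel=kernel)
    print(type(tool).__name__)  # AIFunction


asyncio.run(main())
```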