adding litellm tools fix

MervinPraison committed Feb 27, 2025
1 parent cb0fa20 commit d75c714
Showing 3 changed files with 148 additions and 6 deletions.
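In short: tools attached to an Agent were not reaching litellm when the model was routed through the custom LLM path (e.g. llm="openai/gpt-4o-mini"). Agent.chat() now falls back to the agent's own self.tools when no per-call tools are supplied, and get_response() in llm.py converts each tool into an OpenAI-style function schema via the new _generate_tool_definition() helper and passes the result to every litellm.completion() call.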
14 changes: 11 additions & 3 deletions src/praisonai-agents/llm-tool-call.py
@@ -1,10 +1,18 @@
-from praisonaiagents import Agent, Task, PraisonAIAgents
+from praisonaiagents import Agent
 from praisonaiagents.tools import wiki_search, wiki_summary, wiki_page, wiki_random, wiki_language
 
-agent = Agent(
+agent1 = Agent(
     instructions="You are a Wikipedia Agent",
     tools=[wiki_search, wiki_summary, wiki_page, wiki_random, wiki_language],
     llm="openai/gpt-4o-mini",
     verbose=10
 )
-agent.start("history of AI")
+agent1.start("history of AI in 1 line")
+
+agent2 = Agent(
+    instructions="You are a Wikipedia Agent",
+    tools=[wiki_search, wiki_summary, wiki_page, wiki_random, wiki_language],
+    llm="gpt-4o-mini",
+    verbose=10
+)
+agent2.start("history of AI in 1 line")
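The updated example exercises both routes: agent1 keeps the provider-prefixed model string openai/gpt-4o-mini, which goes through the litellm-backed path patched below, while agent2 uses the bare gpt-4o-mini name, presumably to confirm the default OpenAI path still handles the same tools.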
4 changes: 2 additions & 2 deletions src/praisonai-agents/praisonaiagents/agent/agent.py
@@ -754,7 +754,7 @@ def chat(self, prompt, temperature=0.2, tools=None, output_json=None, output_pyd
                 system_prompt=f"{self.backstory}\n\nYour Role: {self.role}\n\nYour Goal: {self.goal}" if self.use_system_prompt else None,
                 chat_history=self.chat_history,
                 temperature=temperature,
-                tools=tools,
+                tools=self.tools if tools is None else tools,
                 output_json=output_json,
                 output_pydantic=output_pydantic,
                 verbose=self.verbose,
@@ -765,7 +765,7 @@ def chat(self, prompt, temperature=0.2, tools=None, output_json=None, output_pyd
                 console=self.console,
                 agent_name=self.name,
                 agent_role=self.role,
-                agent_tools=[t.__name__ if hasattr(t, '__name__') else str(t) for t in self.tools],
+                agent_tools=[t.__name__ if hasattr(t, '__name__') else str(t) for t in (tools if tools is not None else self.tools)],
                 execute_tool_fn=self.execute_tool,  # Pass tool execution function
                 reasoning_steps=reasoning_steps
             )
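Both hunks apply the same fallback: prefer the tools passed to chat(), otherwise use the tools the agent was constructed with. A minimal standalone sketch of the pattern (illustrative names, not the project's API):

class ToolFallbackDemo:
    def __init__(self, tools=None):
        # Tools registered at construction time
        self.tools = tools or []

    def chat(self, prompt, tools=None):
        # Per-call tools win; only None (not just falsy) triggers the fallback,
        # so an explicit empty list can still disable tools for one call.
        effective_tools = self.tools if tools is None else tools
        return f"{prompt!r} with {len(effective_tools)} tool(s)"

demo = ToolFallbackDemo(tools=[len])
print(demo.chat("hi"))             # falls back to the agent-level tool
print(demo.chat("hi", tools=[]))   # explicit empty list overrides the fallback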
136 changes: 135 additions & 1 deletion src/praisonai-agents/praisonaiagents/llm/llm.py
@@ -282,6 +282,23 @@ def get_response(
         # Disable litellm debug messages
         litellm.set_verbose = False
 
+        # Format tools if provided
+        formatted_tools = None
+        if tools:
+            formatted_tools = []
+            for tool in tools:
+                if callable(tool):
+                    tool_def = self._generate_tool_definition(tool.__name__)
+                elif isinstance(tool, str):
+                    tool_def = self._generate_tool_definition(tool)
+                else:
+                    continue
+
+                if tool_def:
+                    formatted_tools.append(tool_def)
+            if not formatted_tools:
+                formatted_tools = None
+
         # Build messages list
         messages = []
         if system_prompt:
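Each resolved entry in formatted_tools is an OpenAI-style function schema, which is what litellm expects in its tools parameter; note that entries that are neither callables nor name strings fall into the else: continue branch and are dropped. A hand-written example of one formatted entry, matching the shape _generate_tool_definition() builds below (field values are illustrative):

example_tool_def = {
    "type": "function",
    "function": {
        "name": "wiki_search",
        "description": "Search Wikipedia for a query.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "Search query text"}
            },
            "required": ["query"],
        },
    },
}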
@@ -340,6 +357,7 @@ def get_response(
                     messages=messages,
                     temperature=temperature,
                     stream=False,  # force non-streaming
+                    tools=formatted_tools,
                     **{k:v for k,v in kwargs.items() if k != 'reasoning_steps'}
                 )
                 reasoning_content = resp["choices"][0]["message"].get("provider_specific_fields", {}).get("reasoning_content")
@@ -371,6 +389,7 @@ def get_response(
                 for chunk in litellm.completion(
                     model=self.model,
                     messages=messages,
+                    tools=formatted_tools,
                     temperature=temperature,
                     stream=True,
                     **kwargs
@@ -385,6 +404,7 @@ def get_response(
                 for chunk in litellm.completion(
                     model=self.model,
                     messages=messages,
+                    tools=formatted_tools,
                     temperature=temperature,
                     stream=True,
                     **kwargs
@@ -398,6 +418,7 @@ def get_response(
                 final_response = litellm.completion(
                     model=self.model,
                     messages=messages,
+                    tools=formatted_tools,
                     temperature=temperature,
                     stream=False,  # No streaming for tool call check
                     **kwargs
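The non-streaming call above exists because tool calls have to be read off a complete response. With litellm's OpenAI-compatible response shape, requested calls appear on the first choice's message; a hedged sketch of how they could be inspected (not code from this commit):

message = final_response["choices"][0]["message"]
for tool_call in message.get("tool_calls") or []:
    name = tool_call["function"]["name"]            # tool to execute
    arguments = tool_call["function"]["arguments"]  # JSON-encoded argument string
    print(f"model requested {name}({arguments})")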
@@ -1386,4 +1407,117 @@ async def aresponse(
 
         except Exception as error:
             display_error(f"Error in response_async: {str(error)}")
-            raise
+            raise
+
+    def _generate_tool_definition(self, function_name: str) -> Optional[Dict]:
+        """Generate a tool definition from a function name."""
+        logging.debug(f"Attempting to generate tool definition for: {function_name}")
+
+        # First try to get the tool definition if it exists
+        tool_def_name = f"{function_name}_definition"
+        tool_def = globals().get(tool_def_name)
+        logging.debug(f"Looking for {tool_def_name} in globals: {tool_def is not None}")
+
+        if not tool_def:
+            import __main__
+            tool_def = getattr(__main__, tool_def_name, None)
+            logging.debug(f"Looking for {tool_def_name} in __main__: {tool_def is not None}")
+
+        if tool_def:
+            logging.debug(f"Found tool definition: {tool_def}")
+            return tool_def
+
+        # Try to find the function
+        func = globals().get(function_name)
+        logging.debug(f"Looking for {function_name} in globals: {func is not None}")
+
+        if not func:
+            import __main__
+            func = getattr(__main__, function_name, None)
+            logging.debug(f"Looking for {function_name} in __main__: {func is not None}")
+
+        if not func or not callable(func):
+            logging.debug(f"Function {function_name} not found or not callable")
+            return None
+
+        import inspect
+        # Handle Langchain and CrewAI tools
+        if inspect.isclass(func) and hasattr(func, 'run') and not hasattr(func, '_run'):
+            original_func = func
+            func = func.run
+            function_name = original_func.__name__
+        elif inspect.isclass(func) and hasattr(func, '_run'):
+            original_func = func
+            func = func._run
+            function_name = original_func.__name__
+
+        sig = inspect.signature(func)
+        logging.debug(f"Function signature: {sig}")
+
+        # Skip self, *args, **kwargs
+        parameters_list = []
+        for name, param in sig.parameters.items():
+            if name == "self":
+                continue
+            if param.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
+                continue
+            parameters_list.append((name, param))
+
+        parameters = {
+            "type": "object",
+            "properties": {},
+            "required": []
+        }
+
+        # Parse docstring for parameter descriptions
+        docstring = inspect.getdoc(func)
+        logging.debug(f"Function docstring: {docstring}")
+
+        param_descriptions = {}
+        if docstring:
+            import re
+            param_section = re.split(r'\s*Args:\s*', docstring)
+            logging.debug(f"Param section split: {param_section}")
+            if len(param_section) > 1:
+                param_lines = param_section[1].split('\n')
+                for line in param_lines:
+                    line = line.strip()
+                    if line and ':' in line:
+                        param_name, param_desc = line.split(':', 1)
+                        param_descriptions[param_name.strip()] = param_desc.strip()
+
+        logging.debug(f"Parameter descriptions: {param_descriptions}")
+
+        for name, param in parameters_list:
+            param_type = "string"  # Default type
+            if param.annotation != inspect.Parameter.empty:
+                if param.annotation == int:
+                    param_type = "integer"
+                elif param.annotation == float:
+                    param_type = "number"
+                elif param.annotation == bool:
+                    param_type = "boolean"
+                elif param.annotation == list:
+                    param_type = "array"
+                elif param.annotation == dict:
+                    param_type = "object"
+
+            parameters["properties"][name] = {
+                "type": param_type,
+                "description": param_descriptions.get(name, "Parameter description not available")
+            }
+
+            if param.default == inspect.Parameter.empty:
+                parameters["required"].append(name)
+
+        logging.debug(f"Generated parameters: {parameters}")
+        tool_def = {
+            "type": "function",
+            "function": {
+                "name": function_name,
+                "description": docstring.split('\n\n')[0] if docstring else "No description available",
+                "parameters": parameters
+            }
+        }
+        logging.debug(f"Generated tool definition: {tool_def}")
+        return tool_def
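To see the whole pipeline, here is a hand-worked example of what _generate_tool_definition() would produce for a small annotated function with a Google-style docstring (derived by tracing the logic above, not taken from the commit): query has no default so it lands in "required", the int annotation on limit maps to "integer", and the first docstring block becomes the description.

def wiki_search(query: str, limit: int = 5):
    """Search Wikipedia for a query.

    Args:
        query: Search query text
        limit: Maximum number of results
    """

# Expected result of _generate_tool_definition("wiki_search"),
# assuming wiki_search is resolvable from globals() or __main__:
expected = {
    "type": "function",
    "function": {
        "name": "wiki_search",
        "description": "Search Wikipedia for a query.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "Search query text"},
                "limit": {"type": "integer", "description": "Maximum number of results"},
            },
            "required": ["query"],
        },
    },
}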
