From 7192cdf7913c3514e27c25d4dc6b043c4e8716db Mon Sep 17 00:00:00 2001
From: yhjun1026 <460342015@qq.com>
Date: Tue, 26 Dec 2023 16:29:48 +0800
Subject: [PATCH] feat(Agent): change plugin path

change plugin path
---
 dbgpt/agent/agents/agent.py                         | 2 +-
 examples/agents/auto_plan_agent_dialogue_example.py | 3 ++-
 examples/agents/single_agent_dialogue_example.py    | 3 ++-
 3 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/dbgpt/agent/agents/agent.py b/dbgpt/agent/agents/agent.py
index 01dcbf344..432e351db 100644
--- a/dbgpt/agent/agents/agent.py
+++ b/dbgpt/agent/agents/agent.py
@@ -172,7 +172,7 @@ class AgentContext:
     resource_db: Optional[AgentResource] = None
     resource_knowledge: Optional[AgentResource] = None
     resource_internet: Optional[AgentResource] = None
-    llm_models: Optional[List[ModelMetadata]] = None
+    llm_models: Optional[List[Union[ModelMetadata, str]]] = None
     model_priority: Optional[dict] = None
     agents: Optional[List[str]] = None
 
diff --git a/examples/agents/auto_plan_agent_dialogue_example.py b/examples/agents/auto_plan_agent_dialogue_example.py
index 1cc322f0f..aab7a7477 100644
--- a/examples/agents/auto_plan_agent_dialogue_example.py
+++ b/examples/agents/auto_plan_agent_dialogue_example.py
@@ -25,6 +25,7 @@
 from dbgpt.agent.agents.agent import AgentContext
 from dbgpt.agent.memory.gpts_memory import GptsMemory
+from dbgpt.core.interface.llm import ModelMetadata
 
 import asyncio
 import os
 
@@ -34,7 +35,7 @@
 
     llm_client = OpenAILLMClient()
     context: AgentContext = AgentContext(conv_id="test456", llm_provider=llm_client)
-    context.llm_models = ["gpt-3.5-turbo"]
+    context.llm_models = [ModelMetadata(model="gpt-3.5-turbo")]
 
     default_memory = GptsMemory()
     coder = CodeAssistantAgent(memory=default_memory, agent_context=context)
diff --git a/examples/agents/single_agent_dialogue_example.py b/examples/agents/single_agent_dialogue_example.py
index 50b0da71b..71418c351 100644
--- a/examples/agents/single_agent_dialogue_example.py
+++ b/examples/agents/single_agent_dialogue_example.py
@@ -18,6 +18,7 @@
 from dbgpt.agent.agents.user_proxy_agent import UserProxyAgent
 from dbgpt.agent.memory.gpts_memory import GptsMemory
 from dbgpt.agent.agents.agent import AgentContext
+from dbgpt.core.interface.llm import ModelMetadata
 
 import asyncio
 import os
@@ -28,7 +29,7 @@
 
     llm_client = OpenAILLMClient()
     context: AgentContext = AgentContext(conv_id="test456", llm_provider=llm_client)
-    context.llm_models = ["gpt-3.5-turbo"]
+    context.llm_models = [ModelMetadata(model="gpt-3.5-turbo")]
 
     default_memory = GptsMemory()
     coder = CodeAssistantAgent(memory=default_memory, agent_context=context)
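
Illustrative note (not part of the patch): with llm_models widened from
Optional[List[ModelMetadata]] to Optional[List[Union[ModelMetadata, str]]], an
AgentContext now accepts either structured ModelMetadata entries, as the updated
examples do, or plain model-name strings. A minimal sketch under that assumption,
reusing the import paths shown in the hunks above; the context construction is
abbreviated here (the real examples also pass llm_provider=llm_client):

    from dbgpt.agent.agents.agent import AgentContext
    from dbgpt.core.interface.llm import ModelMetadata

    # Hypothetical usage sketch; llm_provider setup omitted for brevity.
    context: AgentContext = AgentContext(conv_id="test456")

    # Both entry styles satisfy Optional[List[Union[ModelMetadata, str]]]:
    context.llm_models = [ModelMetadata(model="gpt-3.5-turbo")]  # structured, as in the examples
    context.llm_models = ["gpt-3.5-turbo"]                       # plain string, still accepted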