Skip to content

Commit

Permalink
fix(agent): fix the agent's LLMClient bug (eosphoros-ai#2298)
Browse files Browse the repository at this point in the history
Co-authored-by: dongzhancai1 <[email protected]>
  • Loading branch information
FOkvj and dongzhancai1 authored Feb 7, 2025
1 parent bb5a078 commit 0310ce9
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions dbgpt/agent/util/llm/llm_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ async def _completions_create(
model_request = _build_model_request(payload)
str_prompt = model_request.messages_to_string()
model_output = None

sep = "########S#E#P#########"
async for output in self._llm_client.generate_stream(model_request.copy()): # type: ignore # noqa
model_output = output
if memory and stream_out:
Expand All @@ -210,7 +210,7 @@ async def _completions_create(
"receiver": "?",
"model": llm_model,
"markdown": self._output_parser.parse_model_nostream_resp(
model_output, "###"
model_output, sep
),
}

Expand All @@ -221,7 +221,7 @@ async def _completions_create(
if not model_output:
raise ValueError("LLM generate stream is null!")
parsed_output = self._output_parser.parse_model_nostream_resp(
model_output, "###"
model_output, sep
)
parsed_output = parsed_output.strip().replace("\\n", "\n")

Expand Down

0 comments on commit 0310ce9

Please sign in to comment.