From 332e5745753f3d7546dc41e2ee27985f0931d140 Mon Sep 17 00:00:00 2001 From: yrk <2493404415@qq.com> Date: Wed, 25 Jun 2025 16:39:47 +0800 Subject: [PATCH] Modify the parameters of ModelScope --- .env.example | 3 +++ src/utils/llm_provider.py | 1 + 2 files changed, 4 insertions(+) diff --git a/.env.example b/.env.example index 8d7ceff5..000f11c4 100644 --- a/.env.example +++ b/.env.example @@ -21,6 +21,9 @@ OLLAMA_ENDPOINT=http://localhost:11434 ALIBABA_ENDPOINT=https://dashscope.aliyuncs.com/compatible-mode/v1 ALIBABA_API_KEY= +MODELSCOPE_ENDPOINT=https://api-inference.modelscope.cn/v1 +MODELSCOPE_API_KEY= + MOONSHOT_ENDPOINT=https://api.moonshot.cn/v1 MOONSHOT_API_KEY= diff --git a/src/utils/llm_provider.py b/src/utils/llm_provider.py index 36da5536..2ef3d638 100644 --- a/src/utils/llm_provider.py +++ b/src/utils/llm_provider.py @@ -349,6 +349,7 @@ def get_llm_model(provider: str, **kwargs): base_url=base_url, model_name=kwargs.get("model_name", "Qwen/QwQ-32B"), temperature=kwargs.get("temperature", 0.0), + extra_body={"enable_thinking": False}, ) else: raise ValueError(f"Unsupported provider: {provider}")