Skip to content

Commit 9725d4d — Adjust reasoning model token requirement (#8750)
Parent commit: 03543df

File tree

3 files changed: +10 additions, −10 deletions

dspy/adapters/two_step_adapter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ class TwoStepAdapter(Adapter):
3030
Example:
3131
```
3232
import dspy
33-
lm = dspy.LM(model="openai/o3-mini", max_tokens=10000, temperature = 1.0)
33+
lm = dspy.LM(model="openai/o3-mini", max_tokens=16000, temperature = 1.0)
3434
adapter = dspy.TwoStepAdapter(dspy.LM("openai/gpt-4o-mini"))
3535
dspy.configure(lm=lm, adapter=adapter)
3636
program = dspy.ChainOfThought("question->answer")

dspy/clients/lm.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -86,10 +86,10 @@ def __init__(
8686
model_pattern = re.match(r"^(?:o[1345]|gpt-5)(?:-(?:mini|nano))?", model_family)
8787

8888
if model_pattern:
89-
if max_tokens < 20000 or temperature != 1.0:
89+
if max_tokens < 16000 or temperature != 1.0:
9090
raise ValueError(
91-
"OpenAI's reasoning models require passing temperature=1.0 and max_tokens >= 20000 to "
92-
"`dspy.LM(...)`, e.g., dspy.LM('openai/gpt-5', temperature=1.0, max_tokens=20000)"
91+
"OpenAI's reasoning models require passing temperature=1.0 and max_tokens >= 16000 to "
92+
"`dspy.LM(...)`, e.g., dspy.LM('openai/gpt-5', temperature=1.0, max_tokens=16000)"
9393
)
9494
self.kwargs = dict(temperature=temperature, max_completion_tokens=max_tokens, **kwargs)
9595
if self.kwargs.get("rollout_id") is None:

tests/clients/test_lm.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -269,12 +269,12 @@ def test_reasoning_model_token_parameter():
269269
lm = dspy.LM(
270270
model=model_name,
271271
temperature=1.0 if is_reasoning_model else 0.7,
272-
max_tokens=20_000 if is_reasoning_model else 1000,
272+
max_tokens=16_000 if is_reasoning_model else 1000,
273273
)
274274
if is_reasoning_model:
275275
assert "max_completion_tokens" in lm.kwargs
276276
assert "max_tokens" not in lm.kwargs
277-
assert lm.kwargs["max_completion_tokens"] == 20_000
277+
assert lm.kwargs["max_completion_tokens"] == 16_000
278278
else:
279279
assert "max_completion_tokens" not in lm.kwargs
280280
assert "max_tokens" in lm.kwargs
@@ -285,21 +285,21 @@ def test_reasoning_model_requirements(model_name):
285285
# Should raise assertion error if temperature or max_tokens requirements not met
286286
with pytest.raises(
287287
ValueError,
288-
match="reasoning models require passing temperature=1.0 and max_tokens >= 20000",
288+
match="reasoning models require passing temperature=1.0 and max_tokens >= 16000",
289289
):
290290
dspy.LM(
291291
model=model_name,
292292
temperature=0.7, # Should be 1.0
293-
max_tokens=1000, # Should be >= 20_000
293+
max_tokens=1000, # Should be >= 16_000
294294
)
295295

296296
# Should pass with correct parameters
297297
lm = dspy.LM(
298298
model=model_name,
299299
temperature=1.0,
300-
max_tokens=20_000,
300+
max_tokens=16_000,
301301
)
302-
assert lm.kwargs["max_completion_tokens"] == 20_000
302+
assert lm.kwargs["max_completion_tokens"] == 16_000
303303

304304

305305
def test_dump_state():

Comments (0)