Commit 298901e

Remove the default LoRA rank warning
2 parents fa58e5b + bc307c5

2 files changed (+2, -5 lines)

pyproject.toml (+1, -1)

@@ -12,7 +12,7 @@ build-backend = "poetry.masonry.api"
 
 [tool.poetry]
 name = "together"
-version = "1.3.9"
+version = "1.3.10"
 authors = [
     "Together AI <[email protected]>"
 ]

src/together/cli/api/finetune.py (+1, -4)

@@ -197,10 +197,7 @@ def create(
         "batch_size": model_limits.lora_training.max_batch_size,
         "learning_rate": 1e-3,
     }
-    log_warn_once(
-        f"The default LoRA rank for {model} has been changed to {default_values['lora_r']} as the max available.\n"
-        f"Also, the default learning rate for LoRA fine-tuning has been changed to {default_values['learning_rate']}."
-    )
+
     for arg in default_values:
         arg_source = ctx.get_parameter_source("arg")  # type: ignore[attr-defined]
         if arg_source == ParameterSource.DEFAULT:
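
For context, the loop that survives this commit applies model-specific defaults only to options the user left unset, using Click's ParameterSource API. Below is a minimal, self-contained sketch of that pattern; the option names, default values, and the create stub are hypothetical stand-ins for illustration, not the actual together CLI code.

# Sketch of the defaults-override pattern from the diff above. The
# per-model defaults here are hypothetical; the real command derives
# them from model_limits.lora_training.
import click
from click.core import ParameterSource


@click.command()
@click.option("--lora-r", type=int, default=8)
@click.option("--learning-rate", type=float, default=1e-5)
@click.pass_context
def create(ctx: click.Context, lora_r: int, learning_rate: float) -> None:
    # Hypothetical model-specific defaults (stand-ins for values taken
    # from model_limits in the real CLI).
    default_values = {"lora_r": 64, "learning_rate": 1e-3}
    params = {"lora_r": lora_r, "learning_rate": learning_rate}
    for arg, value in default_values.items():
        # ParameterSource.DEFAULT means the user never passed the flag,
        # so the model-specific default can take precedence.
        if ctx.get_parameter_source(arg) == ParameterSource.DEFAULT:
            params[arg] = value
    click.echo(params)


if __name__ == "__main__":
    create()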
