@@ -53,7 +53,7 @@ def create_finetune_request(
     n_checkpoints: int | None = 1,
     batch_size: int | Literal["max"] = "max",
     learning_rate: float | None = 0.00001,
-    lr_scheduler_type: Literal["linear", "cosine"] = "linear",
+    lr_scheduler_type: Literal["linear", "cosine"] = "cosine",
     min_lr_ratio: float = 0.0,
     scheduler_num_cycles: float = 0.5,
     warmup_ratio: float | None = None,
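For context, under the conventional cosine schedule these parameters describe, scheduler_num_cycles=0.5 traces a single half-wave from the base learning rate down to base_lr * min_lr_ratio at the last step. A minimal sketch of that conventional formula (an assumption about the usual convention, not necessarily the exact server-side implementation):

import math

def cosine_lr(step: int, total_steps: int, base_lr: float = 0.00001,
              min_lr_ratio: float = 0.0, num_cycles: float = 0.5) -> float:
    # Fraction of training completed, clamped to [0, 1].
    progress = min(max(step / max(1, total_steps), 0.0), 1.0)
    # One full cosine wave per cycle; num_cycles=0.5 decays monotonically
    # from base_lr at step 0 to base_lr * min_lr_ratio at the final step.
    wave = 0.5 * (1.0 + math.cos(2.0 * math.pi * num_cycles * progress))
    return base_lr * (min_lr_ratio + (1.0 - min_lr_ratio) * wave)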
@@ -281,7 +281,7 @@ def create(
         n_checkpoints: int | None = 1,
         batch_size: int | Literal["max"] = "max",
         learning_rate: float | None = 0.00001,
-        lr_scheduler_type: Literal["linear", "cosine"] = "linear",
+        lr_scheduler_type: Literal["linear", "cosine"] = "cosine",
         min_lr_ratio: float = 0.0,
         scheduler_num_cycles: float = 0.5,
         warmup_ratio: float = 0.0,
@@ -318,7 +318,7 @@ def create(
             batch_size (int or "max"): Batch size for fine-tuning. Defaults to max.
             learning_rate (float, optional): Learning rate multiplier to use for training
                 Defaults to 0.00001.
-            lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
+            lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "cosine".
             min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
                 the learning rate scheduler. Defaults to 0.0.
             scheduler_num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
@@ -693,7 +693,7 @@ async def create(
         n_checkpoints: int | None = 1,
         batch_size: int | Literal["max"] = "max",
         learning_rate: float | None = 0.00001,
-        lr_scheduler_type: Literal["linear", "cosine"] = "linear",
+        lr_scheduler_type: Literal["linear", "cosine"] = "cosine",
         min_lr_ratio: float = 0.0,
         scheduler_num_cycles: float = 0.5,
         warmup_ratio: float = 0.0,
@@ -730,7 +730,7 @@ async def create(
             batch_size (int, optional): Batch size for fine-tuning. Defaults to max.
             learning_rate (float, optional): Learning rate multiplier to use for training
                 Defaults to 0.00001.
-            lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
+            lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "cosine".
             min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
                 the learning rate scheduler. Defaults to 0.0.
             scheduler_num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
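Because this commit flips the default, callers who relied on the old behavior must now pass lr_scheduler_type="linear" explicitly. A hedged usage sketch, assuming this diff is against the Together Python SDK's fine-tuning resource shown above (the file ID and model name are placeholders):

from together import Together

client = Together()  # assumes TOGETHER_API_KEY is set in the environment

job = client.fine_tuning.create(
    training_file="file-...",            # placeholder: a previously uploaded file ID
    model="meta-llama/Meta-Llama-3-8B",  # placeholder model name
    learning_rate=0.00001,
    lr_scheduler_type="linear",          # opt back in to the pre-change default
)
print(job.id)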