Commit c0fad3c (1 parent: e331fa3)
src/funcchain/backend/settings.py
@@ -41,6 +41,7 @@ class FuncchainSettings(BaseSettings):
     context_lenght: int = 8196
     n_gpu_layers: int = 50
     keep_loaded: bool = False
+    repeat_penalty: float = 1.0
     local_models_path: str = "./.models"
 
     def model_kwargs(self) -> dict:
@@ -64,6 +65,7 @@ def llamacpp_kwargs(self) -> dict:
             "n_ctx": self.context_lenght,
             "use_mlock": self.keep_loaded,
             "n_gpu_layers": self.n_gpu_layers,
+            "repeat_penalty": self.repeat_penalty,
         }
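
For context, here is a minimal usage sketch (not part of the commit) showing how the new repeat_penalty field flows into the llama.cpp keyword arguments. The default of 1.0 effectively disables repetition penalization. The import path assumes the package root is src/, and the example assumes the remaining FuncchainSettings fields all have defaults:

# Sketch only: verify that the new setting is forwarded to llama.cpp kwargs.
# Import path and instantiation are assumptions, not part of the commit.
from funcchain.backend.settings import FuncchainSettings

settings = FuncchainSettings(repeat_penalty=1.2)  # constructor kwargs override field defaults
kwargs = settings.llamacpp_kwargs()

assert kwargs["repeat_penalty"] == 1.2
assert kwargs["n_gpu_layers"] == 50  # other defaults still flow through unchanged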