1 parent 2ebbacf commit bd0e221
serve/mlc_serve/engine/sampling_params.py
@@ -73,7 +73,7 @@ class SamplingParams:
     # TODO(@team): This info comes from the model config.
     # Currently, it is unclear what is the best way to fetch this info and
     # check in `_verify_args` without this field. Follow-up when we have a better idea.
-    vocab_size = 32000
+    vocab_size: int = 32000
     json_schema: Optional[Dict[str, Any]] = None
     logits_processor: Optional[Any] = None
     mask_prompt: Optional[torch.Tensor] = None
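For context, the annotation matters because `SamplingParams` appears to be a dataclass (the surrounding `Optional[...] = None` fields suggest as much): without a type annotation, `vocab_size = 32000` is treated as a plain class attribute rather than a dataclass field, so it gets no `__init__` parameter and cannot be overridden per model. A minimal sketch of the difference, trimmed to the fields shown in the diff (the `torch.Tensor`-typed `mask_prompt` is omitted to keep it dependency-free; the 128000 value is just an illustrative override):

```python
from dataclasses import dataclass, fields
from typing import Any, Dict, Optional


@dataclass
class SamplingParams:
    # With the annotation, `vocab_size` is a real dataclass field: it appears
    # in fields() and in the generated __init__. Without the annotation it
    # would be a plain class attribute that dataclasses silently ignore.
    vocab_size: int = 32000
    json_schema: Optional[Dict[str, Any]] = None
    logits_processor: Optional[Any] = None


print([f.name for f in fields(SamplingParams)])
# ['vocab_size', 'json_schema', 'logits_processor']
print(SamplingParams(vocab_size=128000).vocab_size)
# 128000
```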