Runtime error
Exit code: 1. Reason: e 70, in serve
    await self._serve(sockets)
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/server.py", line 77, in _serve
    config.load()
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/config.py", line 435, in load
    self.loaded_app = import_from_string(self.app)
  File "/home/user/.local/lib/python3.9/site-packages/uvicorn/importer.py", line 19, in import_from_string
    module = importlib.import_module(module_str)
  File "/usr/local/lib/python3.9/importlib/__init__.py", line 127, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1030, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1007, in _find_and_load
  File "<frozen importlib._bootstrap>", line 986, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 680, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 850, in exec_module
  File "<frozen importlib._bootstrap>", line 228, in _call_with_frames_removed
  File "/app/app.py", line 13, in <module>
    model = AutoModelForCausalLM.from_pretrained(
  File "/home/user/.local/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py", line 559, in from_pretrained
    return model_class.from_pretrained(
  File "/home/user/.local/lib/python3.9/site-packages/transformers/modeling_utils.py", line 3659, in from_pretrained
    config.quantization_config = AutoHfQuantizer.merge_quantization_configs(
  File "/home/user/.local/lib/python3.9/site-packages/transformers/quantizers/auto.py", line 173, in merge_quantization_configs
    quantization_config = AutoQuantizationConfig.from_dict(quantization_config)
  File "/home/user/.local/lib/python3.9/site-packages/transformers/quantizers/auto.py", line 97, in from_dict
    raise ValueError(
ValueError: Unknown quantization type, got fp8 - supported types are: ['awq', 'bitsandbytes_4bit', 'bitsandbytes_8bit', 'gptq', 'aqlm', 'quanto', 'eetq', 'hqq', 'compressed-tensors', 'fbgemm_fp8', 'torchao', 'bitnet']
Container logs:
Fetching error logs...