Commit
Merge pull request #746 from containers/rhoai
Update vLLM containers
ericcurtin authored Feb 6, 2025
2 parents 3477a13 + 8399024 commit 1ab1908
Showing 1 changed file with 2 additions and 2 deletions.
ramalama/model.py: 2 additions, 2 deletions
@@ -118,9 +118,9 @@ def _image(self, args):
 
         if args.runtime == "vllm":
             if gpu_type == "HIP_VISIBLE_DEVICES":
-                return "quay.io/modh/vllm:rhoai-2.17-rocm"
+                return "quay.io/modh/vllm:rhoai-2.18-rocm"
 
-            return "quay.io/modh/vllm:rhoai-2.17-cuda"
+            return "quay.io/modh/vllm:rhoai-2.18-cuda"
 
         split = version().split(".")
         vers = ".".join(split[:2])
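The hunk only bumps the RHOAI vLLM image tags from 2.17 to 2.18 for both the ROCm and CUDA variants. Below is a minimal sketch of how the selection logic reads after this change; the helper name select_vllm_image, the gpu_type parameter, and the None fallback for non-vLLM runtimes are illustrative assumptions (the surrounding function body is collapsed in the diff), not the upstream ramalama API.

# A minimal sketch (not the upstream implementation) of the image selection
# this hunk modifies. Only the image tags come from the diff; everything else
# is an assumption for illustration.
from typing import Optional


def select_vllm_image(runtime: str, gpu_type: str) -> Optional[str]:
    """Return the vLLM container image for the given runtime and GPU, if any."""
    if runtime == "vllm":
        if gpu_type == "HIP_VISIBLE_DEVICES":
            # ROCm (AMD) build of the RHOAI 2.18 vLLM image
            return "quay.io/modh/vllm:rhoai-2.18-rocm"
        # CUDA build is the default for the vLLM runtime
        return "quay.io/modh/vllm:rhoai-2.18-cuda"
    # Non-vLLM runtimes fall through to the version-based image logic
    # that follows this hunk in ramalama/model.py.
    return None


# Example: an AMD GPU detected via HIP_VISIBLE_DEVICES gets the ROCm image.
assert select_vllm_image("vllm", "HIP_VISIBLE_DEVICES") == "quay.io/modh/vllm:rhoai-2.18-rocm"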
