Stash output from container_manager
Fixes: #788

Signed-off-by: Daniel J Walsh <[email protected]>
rhatdan committed Feb 11, 2025
1 parent d13d02b commit 9e76f7b
Showing 3 changed files with 23 additions and 11 deletions.
21 changes: 16 additions & 5 deletions ramalama/common.py
@@ -26,19 +26,30 @@
 DEFAULT_IMAGE = "quay.io/ramalama/ramalama"
 
 
+_engine = ""
+
+
 def container_manager():
+    global _engine
+    if _engine != "":
+        if _engine == "None":
+            return None
+        return _engine
+
+    _engine = "None"
     engine = os.getenv("RAMALAMA_CONTAINER_ENGINE")
     if engine is not None:
-        return engine
+        _engine = engine
+        return _engine
 
     if available("podman"):
         if sys.platform != "darwin" or is_podman_machine_running_with_krunkit():
-            return "podman"
-
-        return None
+            _engine = "podman"
+        return _engine
 
     if available("docker"):
-        return "docker"
+        _engine = "docker"
+        return _engine
 
     return None
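The refactor is plain memoization with a string sentinel: module-level _engine starts out as "", the string "None" means "already probed, no engine found", and any other value is the cached engine name, so the available() and podman-machine probes run at most once per process. A minimal standalone sketch of the same pattern, using hypothetical names (find_tool, TOOL_OVERRIDE) rather than the repo's own:

import os
import shutil

_cached = ""  # "" = not probed yet; "None" = probed, nothing found


def find_tool():
    """Return the first available tool, probing the system only once."""
    global _cached
    if _cached != "":
        return None if _cached == "None" else _cached

    _cached = "None"
    override = os.getenv("TOOL_OVERRIDE")  # hypothetical env var; an explicit override wins
    if override is not None:
        _cached = override
    elif shutil.which("podman"):
        _cached = "podman"
    elif shutil.which("docker"):
        _cached = "docker"
    return None if _cached == "None" else _cached

A cache initialized to None would not work here, because None is also a legitimate final answer; the string sentinel is what distinguishes "not yet computed" from "computed, found nothing".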

4 changes: 2 additions & 2 deletions ramalama/model.py
@@ -188,11 +188,11 @@ def setup_container(self, args):
         conman_args += ["-p", f"{args.port}:{args.port}"]
 
         # Check for env var RAMALAMA_DEVICE to explicitly declare the GPU device path
-        device_override=0
+        device_override = 0
         gpu_device = os.environ.get("RAMALAMA_DEVICE")
         if gpu_device:
             conman_args += ["--device", gpu_device]
-            device_override=1
+            device_override = 1
         if device_override != 1:
             if (sys.platform == "darwin" and os.path.basename(args.engine) != "docker") or os.path.exists("/dev/dri"):
                 conman_args += ["--device", "/dev/dri"]
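Beyond the spacing cleanup around device_override, the hunk shows the device-precedence rule: an explicit RAMALAMA_DEVICE always wins, and only in its absence does ramalama fall back to mounting /dev/dri. A hedged sketch of that rule as a standalone helper (device_args is a hypothetical name, and args.engine is modeled as a plain string):

import os
import sys


def device_args(engine):
    # An explicit RAMALAMA_DEVICE always takes precedence
    gpu_device = os.environ.get("RAMALAMA_DEVICE")
    if gpu_device:
        return ["--device", gpu_device]
    # Otherwise fall back to /dev/dri where it can exist (docker on
    # macOS is excluded, matching the check in the patch)
    if (sys.platform == "darwin" and os.path.basename(engine) != "docker") or os.path.exists("/dev/dri"):
        return ["--device", "/dev/dri"]
    return []

Written with early returns, the 0/1 device_override flag disappears; a boolean, or no flag at all, expresses the same precedence more idiomatically.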
9 changes: 5 additions & 4 deletions ramalama/ollama.py
@@ -64,10 +64,10 @@ def init_pull(repos, accept, registry_head, model_name, model_tag, models, model
 def in_existing_cache(model_name, model_tag):
     if not available("ollama"):
         return None
-    default_ollama_caches=[
+    default_ollama_caches = [
         os.path.join(os.environ['HOME'], '.ollama/models'),
         '/usr/share/ollama/.ollama/models',
-        f'C:\\Users\\{os.getlogin()}\\.ollama\\models'
+        f'C:\\Users\\{os.getlogin()}\\.ollama\\models',
     ]
 
     for cache_dir in default_ollama_caches:
@@ -79,10 +79,11 @@ def in_existing_cache(model_name, model_tag):
                 if layer["mediaType"] == "application/vnd.ollama.image.model":
                     layer_digest = layer["digest"]
                     ollama_digest_path = os.path.join(cache_dir, 'blobs', layer_digest)
-                    if os.path.exists(str(ollama_digest_path).replace(':','-')):
-                        return str(ollama_digest_path).replace(':','-')
+                    if os.path.exists(str(ollama_digest_path).replace(':', '-')):
+                        return str(ollama_digest_path).replace(':', '-')
     return None
 
+
 class Ollama(Model):
     def __init__(self, model):
         model = rm_until_substring(model, "ollama.com/library/")
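The repeated .replace(':', '-') that this hunk reformats reflects Ollama's cache layout: a manifest layer digest such as sha256:<hex> is stored on disk as blobs/sha256-<hex>. A small hypothetical helper, not part of this patch, that isolates the translation and removes the duplicated call:

import os


def digest_to_blob_path(cache_dir, digest):
    # Ollama stores a 'sha256:<hex>' digest as a 'blobs/sha256-<hex>' file
    path = os.path.join(cache_dir, "blobs", digest.replace(":", "-"))
    return path if os.path.exists(path) else None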
