People search for Ollama models using the web UI. This change allows
one to copy the URL from the browser and pass it directly to
ramalama run.

Also pull smaller "smollm" models in the tests to speed up the builds.

Signed-off-by: Eric Curtin <[email protected]>
ericcurtin committed Feb 10, 2025
1 parent 159e0bf commit 60c8d1b
Showing 6 changed files with 27 additions and 21 deletions.
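As a rough sketch of the behavior this change enables (a minimal standalone
copy of the new helper; the model name is only an example), a URL copied from
the Ollama web UI and the ollama:// form should normalize to the same model
reference:

def rm_until_substring(model, substring):
    # Mirrors the helper added to ramalama/model.py below.
    pos = model.find(substring)
    if pos == -1:
        return model
    return model[pos + len(substring):]

for ref in ("https://ollama.com/library/smollm:135m", "ollama://smollm:135m"):
    name = rm_until_substring(ref, "ollama.com/library/")
    name = rm_until_substring(name, "://")
    print(name)  # both forms print "smollm:135m"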
2 changes: 1 addition & 1 deletion ramalama/cli.py
@@ -967,7 +967,7 @@ def rm_cli(args):
 def New(model, args):
     if model.startswith("huggingface://") or model.startswith("hf://") or model.startswith("hf.co/"):
         return Huggingface(model)
-    if model.startswith("ollama"):
+    if model.startswith("ollama://") or "ollama.com/library/" in model:
         return Ollama(model)
     if model.startswith("oci://") or model.startswith("docker://"):
         return OCI(model, args.engine)
7 changes: 3 additions & 4 deletions ramalama/huggingface.py
@@ -2,7 +2,7 @@
 import pathlib
 import urllib.request
 from ramalama.common import available, run_cmd, exec_cmd, download_file, verify_checksum, perror
-from ramalama.model import Model
+from ramalama.model import Model, rm_until_substring

 missing_huggingface = """
 Optional: Huggingface models require the huggingface-cli module.
@@ -33,9 +33,8 @@ def fetch_checksum_from_api(url):

 class Huggingface(Model):
     def __init__(self, model):
-        model = model.removeprefix("huggingface://")
-        model = model.removeprefix("hf://")
-        model = model.removeprefix("hf.co/")
+        model = rm_until_substring(model, "hf.co/")
+        model = rm_until_substring(model, "://")
         super().__init__(model)
         self.type = "huggingface"
         split = self.model.rsplit("/", 1)
9 changes: 9 additions & 0 deletions ramalama/model.py
@@ -486,3 +486,12 @@ def distinfo_volume():
         return ""

     return f"-v{path}:/usr/share/ramalama/{dist_info}:ro"
+
+
+def rm_until_substring(model, substring):
+    pos = model.find(substring)
+    if pos == -1:
+        return model
+
+    # Return the remainder of the string after the found substring
+    return model[pos + len(substring):]
6 changes: 4 additions & 2 deletions ramalama/ollama.py
@@ -2,7 +2,7 @@
 import urllib.request
 import json
 from ramalama.common import run_cmd, verify_checksum, download_file
-from ramalama.model import Model
+from ramalama.model import Model, rm_until_substring


 def fetch_manifest_data(registry_head, model_tag, accept):
@@ -60,7 +60,9 @@ def init_pull(repos, accept, registry_head, model_name, model_tag, models, model

 class Ollama(Model):
     def __init__(self, model):
-        super().__init__(model.removeprefix("ollama://"))
+        model = rm_until_substring(model, "ollama.com/library/")
+        model = rm_until_substring(model, "://")
+        super().__init__(model)
         self.type = "Ollama"

     def _local(self, args):
12 changes: 4 additions & 8 deletions ramalama/url.py
@@ -1,17 +1,13 @@
 import os
 from ramalama.common import download_file
-from ramalama.model import Model
+from ramalama.model import Model, rm_until_substring
+from urllib.parse import urlparse


 class URL(Model):
     def __init__(self, model):
-        self.type = ""
-        for prefix in ["file", "http", "https"]:
-            if model.startswith(f"{prefix}://"):
-                self.type = prefix
-                model = model.removeprefix(f"{prefix}://")
-                break
-
+        self.type = urlparse(model).scheme
+        model = rm_until_substring(model, "://")
         super().__init__(model)
         split = self.model.rsplit("/", 1)
         self.directory = split[0].removeprefix("/") if len(split) > 1 else ""
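For reference, urlparse from the standard library is what now supplies the
scheme in the simplified URL class; the path below is purely hypothetical:

from urllib.parse import urlparse

ref = "https://example.com/models/tiny.gguf"  # hypothetical model URL
print(urlparse(ref).scheme)       # -> https
print(ref[ref.find("://") + 3:])  # -> example.com/models/tiny.gguf (what rm_until_substring yields)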
12 changes: 6 additions & 6 deletions test/system/050-pull.bats
@@ -14,15 +14,15 @@ load setup_suite
 @test "ramalama pull ollama" {
     run_ramalama pull tiny
     run_ramalama rm tiny
-    run_ramalama pull ollama://tinyllama
+    run_ramalama pull https://ollama.com/library/smollm:135m
     run_ramalama list
-    is "$output" ".*ollama://tinyllama" "image was actually pulled locally"
+    is "$output" ".*ollama://smollm:135m" "image was actually pulled locally"

-    RAMALAMA_TRANSPORT=ollama run_ramalama pull tinyllama:1.1b
-    run_ramalama pull ollama://tinyllama:1.1b
+    RAMALAMA_TRANSPORT=ollama run_ramalama pull smollm:360m
+    run_ramalama pull ollama://smollm:360m
     run_ramalama list
-    is "$output" ".*ollama://tinyllama:1.1b" "image was actually pulled locally"
-    run_ramalama rm ollama://tinyllama ollama://tinyllama:1.1b
+    is "$output" ".*ollama://smollm:360m" "image was actually pulled locally"
+    run_ramalama rm ollama://smollm:135m ollama://smollm:360m

     random_image_name=i_$(safename)
     run_ramalama 1 pull ${random_image_name}
