Skip to content

Commit

Permalink
Give decent error message if model not found
Browse files Browse the repository at this point in the history
Rename podman_build.sh to build.sh

Signed-off-by: Daniel J Walsh <[email protected]>
  • Loading branch information
rhatdan committed Jul 26, 2024
1 parent e2717ec commit ebccf9e
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 15 deletions.
File renamed without changes.
4 changes: 2 additions & 2 deletions ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ main() {
apt install -y python3-autopep8
fi

./podman-build.sh
./build.sh
curl -fsSL https://raw.githubusercontent.com/containers/ramalama/main/install.sh | sudo bash

set +o pipefail
./ramalama -h | grep Usage:
set -o pipefail

ramalama pull granite-code
./ramalama pull granite-code
autopep8 --exit-code ramalama # Check style is correct
# ramalama list | grep granite-code
# ramalama rm granite-code
Expand Down
38 changes: 25 additions & 13 deletions ramalama
Original file line number Diff line number Diff line change
Expand Up @@ -46,20 +46,23 @@ def verify_checksum(filename):
return calculated_checksum == expected_checksum


def print_error(*args, **kwargs):
    """Print a message to standard error instead of standard output.

    Accepts exactly the same positional and keyword arguments as the
    built-in print(); only the destination stream differs.
    """
    err_stream = sys.stderr
    print(*args, file=err_stream, **kwargs)

def run_command(args):
    """Run *args* as a subprocess, echoing the command first when verbose.

    The literal text here executed the command twice (once inside a
    try/except that called sys.exit, then again unconditionally); keep a
    single invocation and let subprocess.CalledProcessError propagate so
    callers (e.g. run_curl_command, pull_cli) can decide how to report
    the failure — that matches the except clauses visible elsewhere in
    this file.

    Raises:
        subprocess.CalledProcessError: if the command exits nonzero.
    """
    if x:  # NOTE(review): `x` is presumably a module-level verbose/debug flag — confirm
        print(*args)
    subprocess.run(args, check=True)

def run_curl_command(args, filename):
    """Run a curl download unless *filename* already passes its checksum.

    The literal text here invoked run_command(args) twice (a bare call
    followed by the same call inside try/except), downloading the file
    twice; keep only the guarded call.

    On failure, exit with curl's status; exit code 22 means curl's -f/
    --fail saw an HTTP error (e.g. 404), so print a friendly
    "<filename> not found" message on stderr before exiting.
    """
    if not verify_checksum(filename):
        try:
            run_command(args)
        except subprocess.CalledProcessError as e:
            if e.returncode == 22:  # curl --fail: HTTP page not retrieved
                print_error(filename + " not found")
            sys.exit(e.returncode)

def pull_ollama_manifest(ramalama_store, manifests, accept, registry_head, model_tag):
os.makedirs(os.path.dirname(manifests), exist_ok=True)
Expand All @@ -71,7 +74,6 @@ def pull_ollama_manifest(ramalama_store, manifests, accept, registry_head, model
]
run_command(curl_command)


def pull_ollama_config_blob(ramalama_store, accept, registry_head, manifest_data):
cfg_hash = manifest_data["config"]["digest"]
config_blob_path = os.path.join(ramalama_store, "blobs", cfg_hash)
Expand All @@ -91,7 +93,12 @@ def pull_ollama_blob(ramalama_store, layer_digest, accept, registry_head, ramala
os.makedirs(ramalama_models, exist_ok=True)
relative_target_path = os.path.relpath(
layer_blob_path, start=os.path.dirname(symlink_path))
run_command(["ln", "-sf", relative_target_path, symlink_path])
try:
run_command(["ln", "-sf", relative_target_path, symlink_path])
except subprocess.CalledProcessError as e:
print_error(e)
sys.exit(e.returncode)



def pull_cli(ramalama_store, ramalama_models, model):
Expand All @@ -113,10 +120,15 @@ def pull_cli(ramalama_store, ramalama_models, model):
manifests = os.path.join(ramalama_store, "manifests",
registry, model_name, model_tag)
registry_head = f"{registry_scheme}://{registry}/v2/{model_name}"
pull_ollama_manifest(ramalama_store, manifests,
accept, registry_head, model_tag)
with open(manifests, 'r') as f:
manifest_data = json.load(f)
try:
pull_ollama_manifest(ramalama_store, manifests,
accept, registry_head, model_tag)
with open(manifests, 'r') as f:
manifest_data = json.load(f)
except subprocess.CalledProcessError as e:
if e.returncode == 22:
print_error(model_name + ":" + model_tag + " not found")
sys.exit(e.returncode)

pull_ollama_config_blob(ramalama_store, accept,
registry_head, manifest_data)
Expand Down

0 comments on commit ebccf9e

Please sign in to comment.