Skip to content

Commit

Permalink
Allow pushing from one type of AI Image to an OCI Image
Browse files Browse the repository at this point in the history
This PR allows users to pull images as one type of AI Image
and then push them to an OCI Registry.

For example
ramalama pull tiny
ramalama push tiny quay.io/myregistry/tiny:latest

Signed-off-by: Daniel J Walsh <[email protected]>
  • Loading branch information
rhatdan committed Oct 7, 2024
1 parent 7f2980c commit ea928d8
Show file tree
Hide file tree
Showing 8 changed files with 37 additions and 28 deletions.
2 changes: 1 addition & 1 deletion logos/SVG/ramalama-logo-full-horiz-dark.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion logos/SVG/ramalama-logo-full-horiz.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion logos/SVG/ramalama-logo-full-vertical-dark.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion logos/SVG/ramalama-logo-full-vertical.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
9 changes: 6 additions & 3 deletions ramalama/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -323,14 +323,17 @@ def pull_cli(args):

def push_parser(subparsers):
    """Register the `push` subcommand.

    Pushes an AI model from local storage to a remote registry.
    """
    parser = subparsers.add_parser("push", help="push AI Model from local storage to remote registry")
    parser.add_argument("SOURCE")  # positional: locally stored model to push
    parser.add_argument("TARGET")  # positional: destination registry reference
    parser.set_defaults(func=push_cli)


def push_cli(args):
    """Handle `ramalama push SOURCE TARGET`.

    Resolves the local path of the already-pulled SOURCE model, then
    delegates to the TARGET model type's push() with that file.
    """
    # Resolve the local file for the source model (raises if not pulled).
    smodel = New(args.SOURCE)
    source = smodel.path(args)

    # Hand the resolved file to the target type's push implementation.
    model = New(args.TARGET)
    model.push(source, args)


def _name():
Expand Down
5 changes: 4 additions & 1 deletion ramalama/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,13 @@ def login(self, args):
def logout(self, args):
    """Log out of this model type's registry; subclasses must override."""
    message = f"ramalama logout for {self.type} not implemented"
    raise NotImplementedError(message)

def path(self, args):
    """Return the local filesystem path of the model; subclasses override.

    The base class has no storage layout, so this always raises
    NotImplementedError.
    """
    # Signature fixed to (self, args) to match the Ollama override and the
    # push_cli call site `smodel.path(args)`; also fixes the "puath" typo.
    raise NotImplementedError(f"ramalama path for {self.type} not implemented")

def pull(self, args):
    """Pull the model into local storage; subclasses must override."""
    message = f"ramalama pull for {self.type} not implemented"
    raise NotImplementedError(message)

def push(self, source, args):
    """Push the model file at `source` to the target named in args.

    The base class has no transport, so this always raises
    NotImplementedError; subclasses override it.
    """
    # Diff residue left both the old `def push(self, args)` and the new
    # `def push(self, source, args)` headers; keep only the new signature,
    # which matches the push_cli call site `model.push(source, args)`.
    raise NotImplementedError(f"ramalama push for {self.type} not implemented")

def is_symlink_to(self, file_path, target_path):
Expand Down
22 changes: 7 additions & 15 deletions ramalama/oci.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
from pathlib import Path
import os
import re
import subprocess
import sys

Expand Down Expand Up @@ -36,10 +34,10 @@ def logout(self, args):
conman_args.append(self.model)
return exec_cmd(conman_args)

def _target_decompose(self):
def _target_decompose(self, model):
# Remove the prefix and extract target details
try:
registry, reference = self.model.split("/", 1)
registry, reference = model.split("/", 1)
except Exception:
raise KeyError(
f"You must specify a registry for the model in the form "
Expand All @@ -49,23 +47,17 @@ def _target_decompose(self):
reference_dir = reference.replace(":", "/")
return registry, reference, reference_dir

def push(self, source, args):
    """Push a local model file to the target OCI registry.

    source -- local filesystem path of the model file to upload.
    args   -- parsed CLI arguments; args.TARGET names the destination image.

    Raises KeyError when the target lacks a registry component, and
    re-raises subprocess.CalledProcessError if the omlmd push fails.
    """
    # str.strip("oci://") strips any leading/trailing 'o','c','i',':','/'
    # *characters*, which corrupts targets such as "oci://ci.example.com/x";
    # removeprefix drops exactly the scheme prefix and nothing else.
    target = args.TARGET.removeprefix("oci://")
    # Decompose purely to validate the target format: raises KeyError when
    # no registry is specified.
    self._target_decompose(target)
    try:
        # Push the model using omlmd with empty metadata.
        run_cmd([self.omlmd, "push", target, source, "--empty-metadata"])
    except subprocess.CalledProcessError as e:
        perror(f"Failed to push model to OCI: {e}")
        raise

def pull(self, args):
try:
Expand Down
21 changes: 16 additions & 5 deletions ramalama/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,17 +84,14 @@ def __init__(self, model):
super().__init__(model.removeprefix("ollama://"))
self.type = "Ollama"

def pull(self, args):
repos = args.store + "/repos/ollama"
def _local(self, args):
models = args.store + "/models/ollama"
registry = "https://registry.ollama.ai"
if "/" in self.model:
model_full = self.model
models = os.path.join(models, model_full.rsplit("/", 1)[0])
self._models = os.path.join(models, model_full.rsplit("/", 1)[0])
else:
model_full = "library/" + self.model

accept = "Accept: application/vnd.docker.distribution.manifest.v2+json"
if ":" in model_full:
model_name, model_tag = model_full.split(":", 1)
else:
Expand All @@ -103,9 +100,23 @@ def pull(self, args):

model_base = os.path.basename(model_name)
symlink_path = os.path.join(models, f"{model_base}:{model_tag}")
return symlink_path, models, model_base, model_name, model_tag

def path(self, args):
    """Return the local symlink path of the model.

    Raises KeyError when the model has not been pulled locally.
    """
    symlink_path, _, _, _, _ = self._local(args)
    if not os.path.exists(symlink_path):
        # The original message was a malformed f-string
        # ("f{{args.Model} does not exist" — the 'f' ended up inside the
        # literal, and args has no .Model attribute); report the model
        # name this object was constructed with instead.
        raise KeyError(f"{self.model} does not exist")

    return symlink_path

def pull(self, args):
repos = args.store + "/repos/ollama"
symlink_path, models, model_base, model_name, model_tag = self._local(args)
if os.path.exists(symlink_path):
return symlink_path

registry = "https://registry.ollama.ai"
accept = "Accept: application/vnd.docker.distribution.manifest.v2+json"
manifests = os.path.join(repos, "manifests", registry, model_name, model_tag)
registry_head = f"{registry}/v2/{model_name}"
return init_pull(
Expand Down

0 comments on commit ea928d8

Please sign in to comment.