Merge pull request containers#48 from containers/push-cli
Implement `ramalama push` command for pushing models to remote reposi…
rhatdan authored Aug 22, 2024
2 parents a59856c + a26048d commit 9e0b4eb
Showing 7 changed files with 83 additions and 18 deletions.
2 changes: 1 addition & 1 deletion container-images/ramalama/latest/Containerfile
@@ -12,7 +12,7 @@ RUN dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.n
 
 RUN /usr/bin/python3 --version
 RUN pip install "huggingface_hub[cli]==0.24.2"
-RUN pip install "omlmd==0.1.2"
+RUN pip install "omlmd==0.1.4"
 
 ENV GGML_CCACHE=0
 
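The omlmd pin moves from 0.1.2 to 0.1.4 here and again in install.py and test/ci.sh below, keeping all three install paths in sync; the newer release is presumably what the new push path in ramalama.py relies on. A minimal sketch for asserting the pin at runtime, using only the standard library (check_omlmd_pin is a hypothetical helper, not part of this commit):

    # Sketch: fail fast when the installed omlmd does not match the pinned version.
    from importlib.metadata import PackageNotFoundError, version

    def check_omlmd_pin(expected="0.1.4"):
        try:
            installed = version("omlmd")  # reads installed package metadata
        except PackageNotFoundError:
            raise SystemExit("omlmd is not installed; run: pip install 'omlmd==0.1.4'")
        if installed != expected:
            raise SystemExit(f"omlmd {installed} found, expected {expected}")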
6 changes: 3 additions & 3 deletions docs/source/markdown/ramalama-pull.1.md
@@ -1,13 +1,13 @@
 % ramalama-pull 1
 
 ## NAME
-ramalama - Pull and AI Models into local storage
+ramalama - Pull AI Models into local storage
 
 ## SYNOPSIS
-**ramalama pull** [*options*] *model* [*model*...]
+**ramalama pull** [*options*] *model*
 
 ## DESCRIPTION
-Pull specified AI Models into local storage
+Pull specified AI Model into local storage
 
 ## SEE ALSO
 **[ramalama(1)](ramalama.1.md)
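The synopsis now takes exactly one *model* per invocation. A hypothetical pull of an OCI-hosted model, with registry and names purely illustrative (pull_oci in ramalama.py below handles the oci:// scheme):

    ramalama pull oci://quay.io/example/mymodel:latest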
17 changes: 17 additions & 0 deletions docs/source/markdown/ramalama-push.1.md
@@ -0,0 +1,17 @@
+% ramalama-push 1
+
+## NAME
+ramalama - Push AI Model (OCI-only at present)
+
+## SYNOPSIS
+**ramalama push** [*options*] *model* *target*
+
+## DESCRIPTION
+Push specified AI Model (OCI-only at present)
+
+## SEE ALSO
+**[ramalama(1)](ramalama.1.md)
+
+## HISTORY
+Aug 2024, Originally compiled by Eric Curtin <[email protected]>
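The *model* argument must carry the oci:// prefix, since push_cli in ramalama.py dispatches on it; the prefix on *target* is optional because push_oci strips it. A hypothetical invocation, with registry and names purely illustrative:

    ramalama push oci://quay.io/example/mymodel:latest quay.io/example/mymodel:v1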

1 change: 1 addition & 0 deletions docs/source/markdown/ramalama.1.md
@@ -17,6 +17,7 @@ Ramalama : The goal of ramalama is to make AI even more boring.
 | ------------------------------------------------ | --------------------------------------------------------------------------- |
 | [ramalama-list(1)](ramalama-list.1.md)           | Liste all AI models in local storage.                                         |
 | [ramalama-pull(1)](ramalama-pull.1.md)           | Pull AI Model from registry to local storage                                  |
+| [ramalama-push(1)](ramalama-push.1.md)           | Push specified AI Model (OCI-only at present)                                 |
 | [ramalama-run(1)](ramalama-run.1.md)             | Run a chatbot on AI Model.                                                    |
 | [ramalama-serve(1)](ramalama-serve.1.md)         | Serve local AI Model as an API Service.                                       |
 
2 changes: 1 addition & 1 deletion install.py
@@ -77,7 +77,7 @@ def main():
     subprocess.run(["pip3", "install", "--break-system-packages",
                     "huggingface_hub[cli]==0.24.2"], check=True)
     subprocess.run(["pip3", "install", "--break-system-packages",
-                    "omlmd==0.1.2"], check=True)
+                    "omlmd==0.1.4"], check=True)
     subprocess.run(["brew", "install", "llama.cpp"], check=True)
 
     ramalama_bin = os.path.join(bindir, binfile)
71 changes: 59 additions & 12 deletions ramalama.py
@@ -56,11 +56,11 @@ def print_error(*args, **kwargs):
     print(*args, file=sys.stderr, **kwargs)
 
 
-def run_cmd(args):
+def run_cmd(args, cwd=None):
     if x:
         print(*args)
 
-    return subprocess.run(args, check=True, stdout=subprocess.PIPE)
+    return subprocess.run(args, check=True, cwd=cwd, stdout=subprocess.PIPE)
 
 
 def exec_cmd(args):
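Threading cwd through run_cmd lets a caller run a subprocess from a chosen directory without changing the process-wide working directory via os.chdir(); push_oci below relies on this so the omlmd CLI resolves the model file from its parent directory. A standalone sketch of the same subprocess pattern (the ls invocation and /tmp path are illustrative):

    import subprocess

    # Run a command from a specific directory; check=True raises
    # CalledProcessError on a non-zero exit status, as run_cmd expects.
    result = subprocess.run(["ls"], check=True, cwd="/tmp", stdout=subprocess.PIPE)
    print(result.stdout.decode())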
@@ -254,12 +254,7 @@ def pull_huggingface(model, ramalama_store):
 
 
 def pull_oci(model, ramalama_store):
-    target = re.sub(r'^oci://', '', model)
-    registry, reference = target.split('/', 1)
-    registry, reference = ("docker.io",
-                           target) if "." not in registry else (
-                               registry, reference)
-    reference_dir = reference.replace(":", "/")
+    target, registry, reference, reference_dir = oci_target_decompose(model)
     outdir = f"{ramalama_store}/repos/oci/{registry}/{reference_dir}"
     print(f"Downloading {target}...")
     # note: in the current way ramalama is designed, cannot do Helper(OMLMDRegistry()).pull(target, outdir) since cannot use modules/sdk, can use only cli bindings from pip installs
@@ -333,6 +328,57 @@ def pull_cli(ramalama_store, args, port):
 funcDict["pull"] = pull_cli
 
 
+def oci_target_decompose(model):
+    # Remove the prefix and extract target details
+    target = re.sub(r'^oci://', '', model)
+    registry, reference = target.split('/', 1)
+    if "." not in registry:
+        print_error(f"You must specify a registry for the model in the form 'oci://registry.acme.org/ns/repo:tag', got instead: {model}")
+        sys.exit(1)
+    reference_dir = reference.replace(":", "/")
+    return target, registry, reference, reference_dir
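Tracing oci_target_decompose on a concrete reference (the quay.io registry and repository names are illustrative) shows how each piece is derived, and how the tag's colon becomes a path separator for the on-disk layout:

    # "oci://quay.io/example/mymodel:latest" decomposes as:
    #   target        -> "quay.io/example/mymodel:latest"  (oci:// prefix stripped)
    #   registry      -> "quay.io"                         (first segment; must contain ".")
    #   reference     -> "example/mymodel:latest"
    #   reference_dir -> "example/mymodel/latest"          (":" replaced by "/")
    target, registry, reference, reference_dir = oci_target_decompose(
        "oci://quay.io/example/mymodel:latest")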


+def push_oci(ramalama_store, model, target):
+    _, registry, _, reference_dir = oci_target_decompose(model)
+    target = re.sub(r'^oci://', '', target)
+
+    # Validate the model exists locally
+    local_model_path = os.path.join(
+        ramalama_store, 'models/oci', registry, reference_dir)
+    if not os.path.exists(local_model_path):
+        print_error(f"Model {model} not found locally. Cannot push.")
+        sys.exit(1)
+
+    model_file = Path(local_model_path).resolve()
+    try:
+        # Push the model using omlmd, using cwd the model's file parent directory
+        run_cmd(["omlmd", "push", target, str(model_file), "--empty-metadata"], cwd=model_file.parent)
+    except subprocess.CalledProcessError as e:
+        print_error(f"Failed to push model to OCI: {e}")
+        sys.exit(e.returncode)
+
+    return local_model_path
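push_oci validates against the models/oci tree of the local store, so a model must already be present locally before it can be pushed (note that pull_oci above downloads into repos/oci; the models/oci entry is presumably populated by code outside this diff). Under that layout, a hypothetical store root resolves as follows:

    import os

    store = "/var/lib/ramalama"               # hypothetical store root
    registry = "quay.io"                      # as returned by oci_target_decompose(model)
    reference_dir = "example/mymodel/latest"
    print(os.path.join(store, "models/oci", registry, reference_dir))
    # -> /var/lib/ramalama/models/oci/quay.io/example/mymodel/latest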


+def push_cli(ramalama_store, args, port):
+    if len(args) < 2:
+        usage()
+
+    model = args.pop(0)
+    target = args.pop(0)
+    if model.startswith("oci://"):
+        return push_oci(ramalama_store, model, target)
+
+    # TODO: Additional repository types can be added here, e.g., Ollama, HuggingFace, etc.
+    else:
+        print_error(f"Unsupported repository type for model: {model}")
+        sys.exit(1)
+
+
+funcDict["push"] = push_cli


 def run_cli(ramalama_store, args, port):
     if len(args) < 1:
         usage()
@@ -361,10 +407,11 @@ def usage():
     print(f"  {os.path.basename(__file__)} COMMAND")
     print()
     print("Commands:")
-    print("  list        List models")
-    print("  pull MODEL  Pull a model")
-    print("  run MODEL   Run a model")
-    print("  serve MODEL Serve a model")
+    print("  list               List models")
+    print("  pull MODEL         Pull a model")
+    print("  push MODEL TARGET  Push a model to target")
+    print("  run MODEL          Run a model")
+    print("  serve MODEL        Serve a model")
     sys.exit(1)


2 changes: 1 addition & 1 deletion test/ci.sh
@@ -29,7 +29,7 @@ main() {
     /usr/bin/python3 --version
     pip install "huggingface_hub[cli]==0.24.2"
     huggingface-cli --help
-    pip install "omlmd==0.1.2"
+    pip install "omlmd==0.1.4"
     omlmd --help
   fi
 
Expand Down
