Skip to content

Commit

Permalink
Remove omlmd from OCI calls
Browse files Browse the repository at this point in the history
Signed-off-by: Daniel J Walsh <[email protected]>
  • Loading branch information
rhatdan committed Jan 15, 2025
1 parent 87fc5d7 commit 37654ee
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 63 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ help:

.PHONY: install-requirements
install-requirements:
pipx install black flake8 argcomplete wheel omlmd huggingface_hub codespell
pipx install black flake8 argcomplete wheel huggingface_hub codespell

.PHONY: install-completions
install-completions: completions
Expand Down
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -238,7 +238,6 @@ llama.cpp
whisper.cpp
vllm
podman
omlmd
huggingface

so if you like this tool, give some of these repos a :star:, and hey, give us a :star: too while you are at it.
Expand Down
63 changes: 11 additions & 52 deletions ramalama/oci.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
import json
import os
import subprocess
import sys
import tempfile

import ramalama.annotations as annotations
from ramalama.model import Model, MODEL_TYPES
from ramalama.common import (
available,
engine_version,
exec_cmd,
MNT_FILE,
Expand Down Expand Up @@ -111,21 +109,6 @@ def __init__(self, model, conman):
raise ValueError(f"{model} invalid: Only OCI Model types supported")
self.type = "OCI"
self.conman = conman
if available("omlmd"):
self.omlmd = "omlmd"
else:
for i in sys.path:
self.omlmd = f"{i}/../../../bin/omlmd"
if os.path.exists(self.omlmd):
break
raise NotImplementedError(
"""\
OCI models requires the omlmd module.
This module can be installed via PyPi tools like pip, pip3, pipx or via
distribution package managers like dnf or apt. Example:
pip install omlmd
"""
)

def login(self, args):
conman_args = [self.conman, "login"]
Expand Down Expand Up @@ -290,19 +273,17 @@ def push(self, source, args):

def pull(self, args):
    # Pull the OCI model image into the local store.
    # NOTE(review): the lines below are a commit-diff overlay — both the
    # pre-change and post-change implementations appear; see markers.
    print(f"Downloading {self.model}...")
    # --- removed in this commit: engine pull with silent fallback to omlmd ---
    if args.engine:
    try:
    conman_args = [args.engine, "pull"]
    # podman/docker accept --tls-verify=false to skip registry TLS checks
    if str(args.tlsverify).lower() == "false":
    conman_args.extend([f"--tls-verify={args.tlsverify}"])
    if args.authfile:
    conman_args.extend([f"--authfile={args.authfile}"])
    conman_args.extend([self.model])
    run_cmd(conman_args, debug=args.debug)
    return MNT_FILE
    except subprocess.CalledProcessError:
    # old behavior: swallow the engine failure and try the omlmd CLI path
    pass
    return self._pull_omlmd(args)
    # --- added in this commit: a container engine is now mandatory ---
    if not args.engine:
    raise NotImplementedError("OCI images require a container engine like Podman or Docker")

    conman_args = [args.engine, "pull"]
    if str(args.tlsverify).lower() == "false":
    conman_args.extend([f"--tls-verify={args.tlsverify}"])
    if args.authfile:
    conman_args.extend([f"--authfile={args.authfile}"])
    conman_args.extend([self.model])
    run_cmd(conman_args, debug=args.debug)
    # MNT_FILE is imported from ramalama.common (see the import hunk above)
    return MNT_FILE

def _registry_reference(self):
try:
Expand All @@ -311,28 +292,6 @@ def _registry_reference(self):
except Exception:
return "docker.io", self.model

def _pull_omlmd(self, args):
    """Pull the model via the external ``omlmd`` CLI (removed by this commit).

    Downloads the OCI artifact into ``{args.store}/repos/oci/...`` with
    ``omlmd pull``, locates the single ``.gguf`` file it produced, and
    exposes it under ``{args.store}/models/oci/...`` through a relative
    symlink.  Returns the symlink path.

    Raises:
        KeyError: if zero or more than one ``.gguf`` file is found in the
            download directory.
    """
    registry, reference = self._registry_reference()
    # "name:tag" becomes "name/tag" so the tag is a directory level on disk
    reference_dir = reference.replace(":", "/")
    outdir = f"{args.store}/repos/oci/{registry}/{reference_dir}"
    # note: in the current way RamaLama is designed, cannot do Helper(OMLMDRegistry()).pull(target, outdir)
    # since cannot use modules/sdk, can use only cli bindings from pip installs
    run_cmd([self.omlmd, "pull", self.model, "--output", outdir], debug=args.debug)
    # exactly one GGUF payload is expected per model artifact
    ggufs = [file for file in os.listdir(outdir) if file.endswith(".gguf")]
    if len(ggufs) != 1:
        raise KeyError(f"unable to identify .gguf file in: {outdir}")

    directory = f"{args.store}/models/oci/{registry}/{reference_dir}"
    os.makedirs(directory, exist_ok=True)
    model_path = f"{directory}/{ggufs[0]}"
    # relative link keeps the store relocatable as a whole
    relative_target_path = os.path.relpath(f"{outdir}/{ggufs[0]}", start=os.path.dirname(model_path))
    # NOTE(review): os.readlink raises OSError if model_path exists but is
    # not a symlink — presumably the store only ever puts symlinks here.
    if os.path.exists(model_path) and os.readlink(model_path) == relative_target_path:
        # Symlink is already correct, no need to update it
        return model_path

    # ln -sf overwrites any stale link pointing at an old download
    run_cmd(["ln", "-sf", relative_target_path, model_path], debug=args.debug)
    return model_path

def model_path(self, args):
registry, reference = self._registry_reference()
reference_dir = reference.replace(":", "/")
Expand Down
10 changes: 1 addition & 9 deletions rpm/python-ramalama.spec
Original file line number Diff line number Diff line change
Expand Up @@ -47,13 +47,7 @@ configure the system for AI themselves. After the initialization, RamaLama
will run the AI Models within a container based on the OCI image.

%package -n python%{python3_pkgversion}-%{pypi_name}
Requires: podman
%if 0%{?fedora} >= 40
# Needed as seen by BZ: 2327515
Requires: python%{python3_pkgversion}-omlmd
%else
Recommends: python%{python3_pkgversion}-omlmd
%endif
Recommends: podman
Summary: %{summary}
Provides: %{pypi_name} = %{version}-%{release}

Expand All @@ -68,10 +62,8 @@ configure the system for AI themselves. After the initialization, RamaLama
will run the AI Models within a container based on the OCI image.


%if 0%{?fedora} >= 40
%generate_buildrequires
%pyproject_buildrequires
%endif

%prep
%forgeautosetup -p1
Expand Down

0 comments on commit 37654ee

Please sign in to comment.