From e018012408ce539479ddbc141c67c24c2ce9ae09 Mon Sep 17 00:00:00 2001 From: Daniel J Walsh Date: Fri, 11 Oct 2024 09:24:48 -0400 Subject: [PATCH] On Mac, run in container ... if podman is configured with krunkit, or if Docker is installed. Signed-off-by: Daniel J Walsh --- docs/ramalama.1.md | 7 ++++++- ramalama/cli.py | 29 ++++++++++++++++++++++++++++- 2 files changed, 34 insertions(+), 2 deletions(-) diff --git a/docs/ramalama.1.md b/docs/ramalama.1.md index 9b31b5b1..fdab3f22 100644 --- a/docs/ramalama.1.md +++ b/docs/ramalama.1.md @@ -26,6 +26,11 @@ behaviour. RamaLama supports multiple AI model registries types called transports. Supported transports: +Note: + +On Apple Macs with Arm support and Podman, the Podman machine must be +configured to use the krunkit VM Type. This allows the Mac's GPU to be +used within the VM. ## TRANSPORTS @@ -117,7 +122,7 @@ store AI Models in the specified directory (default rootless: `$HOME/.local/shar ## SEE ALSO -**[podman(1)](https://github.com/containers/podman/blob/main/docs/podman.1.md)** +**[podman(1)](https://github.com/containers/podman/blob/main/docs/podman.1.md)**, **docker(1)** ## HISTORY Aug 2024, Originally compiled by Dan Walsh diff --git a/ramalama/cli.py b/ramalama/cli.py index 47127204..fc6373b8 100644 --- a/ramalama/cli.py +++ b/ramalama/cli.py @@ -30,14 +30,41 @@ class HelpException(Exception): pass +def ai_support_in_vm(): + conman = container_manager() + if conman == "": + return False + + if conman == "podman": + conman_args = [conman, "machine", "list", "--format", "{{ .VMType }}"] + try: + output = run_cmd(conman_args).stdout.decode("utf-8").strip() + if output == "krunkit": + return True + except subprocess.CalledProcessError: + pass + perror( + """\ +Warning: podman needs to be configured to use krunkit for AI Workloads, +running without containers +""" + ) + return False + # Assume this is running with Docker and return true + return True + + def use_container(): transport = 
os.getenv("RAMALAMA_IN_CONTAINER") if transport: return transport.lower() == "true" - if in_container() or sys.platform == "darwin": + if in_container(): return False + if sys.platform == "darwin": + return ai_support_in_vm() + return True