From 5b79da95fd7cf79b63fbe2b9de3dd6c220899b08 Mon Sep 17 00:00:00 2001 From: ducoterra Date: Fri, 6 Feb 2026 20:19:10 -0500 Subject: [PATCH] add local ai podman docs --- active/podman_localai/localai.md | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/active/podman_localai/localai.md b/active/podman_localai/localai.md index 6da2156..ca61beb 100644 --- a/active/podman_localai/localai.md +++ b/active/podman_localai/localai.md @@ -2,6 +2,7 @@ - [Local AI with Anything LLM](#local-ai-with-anything-llm) - [Useful links I keep losing](#useful-links-i-keep-losing) + - [Podman](#podman) - [Running Local AI on Ubuntu 24.04 with Nvidia GPU](#running-local-ai-on-ubuntu-2404-with-nvidia-gpu) - [Running Local AI on Arch with AMD GPU](#running-local-ai-on-arch-with-amd-gpu) - [Running Anything LLM](#running-anything-llm) @@ -32,6 +33,12 @@ - [Example model config files from gallery](https://github.com/mudler/LocalAI/tree/master/gallery) - [List of all available models](https://github.com/mudler/LocalAI/blob/master/gallery/index.yaml) +## Podman + +```bash +podman run -ti --name local-ai -p 8081:8080 --device=/dev/kfd --device=/dev/dri --group-add=video --replace localai/localai:latest-gpu-vulkan +``` + ## Running Local AI on Ubuntu 24.04 with Nvidia GPU ```bash @@ -124,7 +131,7 @@ pipx install "huggingface_hub[cli]" podman network create --ipv6 --label local-ai local-ai # You might want to mount an external drive here. 
-export MODEL_DIR=/models +export MODEL_DIR=/srv/models mkdir -p $MODEL_DIR # LOCALAI_SINGLE_ACTIVE_BACKEND will unload the previous model before loading the next one @@ -136,14 +143,13 @@ mkdir -p $MODEL_DIR podman run \ -d \ -p 8080:8080 \ --e LOCALAI_API_KEY=$(cat ~/.localai/token) \ -e LOCALAI_SINGLE_ACTIVE_BACKEND=true \ --device /dev/dri \ --device /dev/kfd \ --name local-ai \ ---network local-ai \ --v $MODEL_DIR:/build/models \ --v localai-tmp:/tmp/generated \ +--replace \ +-v $MODEL_DIR:/build/models:z \ +-v localai-tmp:/tmp/generated:z \ quay.io/go-skynet/local-ai:master-hipblas-ffmpeg # The second (8081) will be our frontend. We'll protect it with basic auth. @@ -153,9 +159,9 @@ podman run \ -d \ -p 8081:8080 \ --name local-ai-webui \ ---network local-ai \ --v $MODEL_DIR:/build/models \ --v localai-tmp:/tmp/generated \ +--replace \ +-v $MODEL_DIR:/build/models:z \ +-v localai-tmp:/tmp/generated:z \ quay.io/go-skynet/local-ai:master-hipblas-ffmpeg ```