more framework desktop local ai fixes
All checks were successful
Podman DDNS Image / build-and-push-ddns (push) Successful in 1m12s
All checks were successful
Podman DDNS Image / build-and-push-ddns (push) Successful in 1m12s
This commit is contained in:
@@ -17,6 +17,7 @@
|
|||||||
- [stable-diffusion.cpp](#stable-diffusioncpp)
|
- [stable-diffusion.cpp](#stable-diffusioncpp)
|
||||||
- [open-webui](#open-webui)
|
- [open-webui](#open-webui)
|
||||||
- [Install the whole thing with quadlets (TM)](#install-the-whole-thing-with-quadlets-tm)
|
- [Install the whole thing with quadlets (TM)](#install-the-whole-thing-with-quadlets-tm)
|
||||||
|
- [Install the update script](#install-the-update-script)
|
||||||
|
|
||||||
## BIOS
|
## BIOS
|
||||||
|
|
||||||
@@ -91,6 +92,10 @@ hf auth login
|
|||||||
mkdir /home/ai/models/text/gpt-oss-120b
|
mkdir /home/ai/models/text/gpt-oss-120b
|
||||||
hf download --local-dir /home/ai/models/text/gpt-oss-120b ggml-org/gpt-oss-120b-GGUF
|
hf download --local-dir /home/ai/models/text/gpt-oss-120b ggml-org/gpt-oss-120b-GGUF
|
||||||
|
|
||||||
|
# gpt-oss-20b
|
||||||
|
mkdir /home/ai/models/text/gpt-oss-20b
|
||||||
|
hf download --local-dir /home/ai/models/text/gpt-oss-20b ggml-org/gpt-oss-20b-GGUF
|
||||||
|
|
||||||
# devstral-2-123b
|
# devstral-2-123b
|
||||||
mkdir /home/ai/models/text/devstral-2-123b
|
mkdir /home/ai/models/text/devstral-2-123b
|
||||||
hf download --local-dir /home/ai/models/text/devstral-2-123b unsloth/Devstral-2-123B-Instruct-2512-GGUF Q4_K_M/Devstral-2-123B-Instruct-2512-Q4_K_M-00001-of-00002.gguf
|
hf download --local-dir /home/ai/models/text/devstral-2-123b unsloth/Devstral-2-123B-Instruct-2512-GGUF Q4_K_M/Devstral-2-123B-Instruct-2512-Q4_K_M-00001-of-00002.gguf
|
||||||
@@ -99,6 +104,14 @@ hf download --local-dir /home/ai/models/text/devstral-2-123b unsloth/Devstral-2-
|
|||||||
# devstral-small-2-24b
|
# devstral-small-2-24b
|
||||||
mkdir /home/ai/models/text/devstral-small-2-24b
|
mkdir /home/ai/models/text/devstral-small-2-24b
|
||||||
hf download --local-dir /home/ai/models/text/devstral-small-2-24b unsloth/Devstral-Small-2-24B-Instruct-2512-GGUF Devstral-Small-2-24B-Instruct-2512-Q4_K_M.gguf
|
hf download --local-dir /home/ai/models/text/devstral-small-2-24b unsloth/Devstral-Small-2-24B-Instruct-2512-GGUF Devstral-Small-2-24B-Instruct-2512-Q4_K_M.gguf
|
||||||
|
|
||||||
|
# ministral-3-14b
|
||||||
|
mkdir /home/ai/models/text/ministral-3-14b
|
||||||
|
hf download --local-dir /home/ai/models/text/ministral-3-14b ggml-org/Ministral-3-14B-Reasoning-2512-GGUF
|
||||||
|
|
||||||
|
# nemotron-nano-30b
|
||||||
|
mkdir /home/ai/models/text/nemotron-nano-30b
|
||||||
|
hf download --local-dir /home/ai/models/text/nemotron-nano-30b ggml-org/Nemotron-Nano-3-30B-A3B-GGUF Nemotron-Nano-3-30B-A3B-Q4_K_M.gguf
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Image models
|
#### Image models
|
||||||
@@ -144,6 +157,10 @@ localhost/llama-cpp-vulkan:2026-01-19-18-00-02 \
|
|||||||
|
|
||||||
## stable-diffusion.cpp
|
## stable-diffusion.cpp
|
||||||
|
|
||||||
|
Server: <https://github.com/leejet/stable-diffusion.cpp/tree/master/examples/server>
|
||||||
|
|
||||||
|
CLI: <https://github.com/leejet/stable-diffusion.cpp/tree/master/examples/cli>
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone https://github.com/leejet/stable-diffusion.cpp.git
|
git clone https://github.com/leejet/stable-diffusion.cpp.git
|
||||||
cd stable-diffusion.cpp
|
cd stable-diffusion.cpp
|
||||||
@@ -165,12 +182,13 @@ localhost/stable-diffusion-cpp:latest \
|
|||||||
--llm /models/image/z-turbo/Qwen3-4B-Instruct-2507-Q4_K_M.gguf \
|
--llm /models/image/z-turbo/Qwen3-4B-Instruct-2507-Q4_K_M.gguf \
|
||||||
--cfg-scale 1.0 \
|
--cfg-scale 1.0 \
|
||||||
-v \
|
-v \
|
||||||
-H 512 \
|
-H 1024 \
|
||||||
-W 1024 \
|
-W 1024 \
|
||||||
--seed -1 \
|
--seed -1 \
|
||||||
--steps 8 \
|
--steps 8 \
|
||||||
|
--vae-conv-direct \
|
||||||
-o /output/output.png \
|
-o /output/output.png \
|
||||||
-p "A watercolor dragon with flowing ink lines, pastel palette, white paper background, soft brush strokes, high-resolution"
|
-p "A photorealistic dragon"
|
||||||
|
|
||||||
# Edit with flux kontext
|
# Edit with flux kontext
|
||||||
podman run --rm \
|
podman run --rm \
|
||||||
@@ -187,7 +205,7 @@ localhost/stable-diffusion-cpp:latest \
|
|||||||
--sampling-method euler \
|
--sampling-method euler \
|
||||||
--seed -1 \
|
--seed -1 \
|
||||||
--steps 20 \
|
--steps 20 \
|
||||||
-H 512 \
|
-H 1024 \
|
||||||
-W 1024 \
|
-W 1024 \
|
||||||
-r /output/everquest_logo.png \
|
-r /output/everquest_logo.png \
|
||||||
-p "change 'EverQuest' to 'EverSteak'" \
|
-p "change 'EverQuest' to 'EverSteak'" \
|
||||||
@@ -197,6 +215,10 @@ localhost/stable-diffusion-cpp:latest \
|
|||||||
## open-webui
|
## open-webui
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
mkdir /home/ai/.env
|
||||||
|
# Create a file called open-webui-env containing `WEBUI_SECRET_KEY="some-random-key"`
|
||||||
|
scp active/device_framework_desktop/secrets/open-webui-env deskwork-ai:.env/
|
||||||
|
|
||||||
# Will be available on port 8080
|
# Will be available on port 8080
|
||||||
podman run \
|
podman run \
|
||||||
-d \
|
-d \
|
||||||
@@ -215,3 +237,13 @@ ssh deskwork-ai
|
|||||||
systemctl --user daemon-reload
|
systemctl --user daemon-reload
|
||||||
systemctl --user restart ai-pod.service
|
systemctl --user restart ai-pod.service
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Install the update script
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Copy update script and run it (assumes you have llama.cpp and stable-diffusion.cpp)
|
||||||
|
scp active/device_framework_desktop/update-script.sh deskwork-ai:
|
||||||
|
ssh deskwork-ai
|
||||||
|
chmod +x update-script.sh
|
||||||
|
./update-script.sh
|
||||||
|
```
|
||||||
|
|||||||
@@ -11,6 +11,9 @@ Image=ghcr.io/open-webui/open-webui:main
|
|||||||
# Nothing too complicated here. Open Webui will basically configure itself.
|
# Nothing too complicated here. Open Webui will basically configure itself.
|
||||||
Volume=open-webui-data:/app/backend/data
|
Volume=open-webui-data:/app/backend/data
|
||||||
|
|
||||||
|
# WEBUI_SECRET_KEY is required to prevent logout on Restart
|
||||||
|
EnvironmentFile=/home/ai/.env/open-webui-env
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Restart=always
|
Restart=always
|
||||||
# Extend Timeout to allow time to pull the image
|
# Extend Timeout to allow time to pull the image
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ Exec=-l 0.0.0.0 \
|
|||||||
--t5xxl /models/image/flux-1-kontext/t5xxl_fp16.safetensors \
|
--t5xxl /models/image/flux-1-kontext/t5xxl_fp16.safetensors \
|
||||||
--cfg-scale 1.0 \
|
--cfg-scale 1.0 \
|
||||||
--sampling-method euler \
|
--sampling-method euler \
|
||||||
|
--vae-conv-direct \
|
||||||
-v \
|
-v \
|
||||||
--seed -1 \
|
--seed -1 \
|
||||||
--steps 28
|
--steps 28
|
||||||
|
|||||||
@@ -27,6 +27,7 @@ Exec=-l 0.0.0.0 \
|
|||||||
-l 0.0.0.0 \
|
-l 0.0.0.0 \
|
||||||
--listen-port 1234 \
|
--listen-port 1234 \
|
||||||
--cfg-scale 1.0 \
|
--cfg-scale 1.0 \
|
||||||
|
--vae-conv-direct \
|
||||||
-v \
|
-v \
|
||||||
--seed -1 \
|
--seed -1 \
|
||||||
--steps 8
|
--steps 8
|
||||||
|
|||||||
20
active/device_framework_desktop/update-script.sh
Normal file
20
active/device_framework_desktop/update-script.sh
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
# Rebuilds the local llama.cpp and stable-diffusion.cpp container images,
# pulls the latest open-webui image, and restarts the ai-pod service so
# the running containers pick up the new images.
#
# Assumes /home/ai/llama.cpp and /home/ai/stable-diffusion.cpp are existing
# git checkouts (see the README install steps).
set -euo pipefail   # abort on any failed step instead of building/restarting with stale state
set -x              # trace commands so the update run is auditable

# Timestamp tag uniquely identifies each build; 'latest' is tagged alongside it.
# Assignment is separated from 'export' so a failing date(1) is not masked (SC2155).
BUILD_TAG=$(date +"%Y-%m-%d-%H-%M-%S")
export BUILD_TAG

echo "Updates stable-diffusion.cpp, llama.cpp, and open-webui"

# llama.cpp: refresh sources and rebuild the Vulkan image from the upstream Dockerfile.
cd /home/ai/llama.cpp
git pull
podman build -t "llama-cpp-vulkan:${BUILD_TAG}" -t llama-cpp-vulkan:latest -f .devops/vulkan.Dockerfile .

# stable-diffusion.cpp: same, but it vendors dependencies as git submodules,
# so those must be synced before building.
cd /home/ai/stable-diffusion.cpp
git pull
git submodule update --init --recursive
podman build -f Dockerfile.vulkan -t "stable-diffusion-cpp:${BUILD_TAG}" -t stable-diffusion-cpp:latest .

# open-webui ships a prebuilt upstream image; just pull the latest.
podman image pull ghcr.io/open-webui/open-webui:main

# Restart the pod so its containers are recreated from the fresh images.
systemctl --user restart ai-pod
|
||||||
Reference in New Issue
Block a user