graduate localai, add ollama
# Ollama

<https://github.com/ollama/ollama>

<https://github.com/Mintplex-Labs/anything-llm/blob/master/docker/HOW_TO_USE_DOCKER.md>

## Running with Podman

This installs both Ollama and Anything LLM as backend/frontend services.

<https://ollama.com/library>

```bash
podman network create localai

# Ollama
podman run \
  -d \
  -v ollama:/root/.ollama \
  -p 127.0.0.1:11434:11434 \
  --network localai \
  --name ollama \
  docker.io/ollama/ollama

# Pull new models
podman container exec ollama ollama pull llama3.2:3b
podman container exec ollama ollama pull llama3.2:1b
podman container exec ollama ollama pull llama3.2-vision:11b
podman container exec ollama ollama pull llava-llama3:8b
podman container exec ollama ollama pull deepseek-coder-v2:16b
podman container exec ollama ollama pull opencoder:8b
podman container exec ollama ollama pull codestral:22b

# Talk to an existing model via the CLI
podman container exec -it ollama ollama run llama3.2:3b

# Anything LLM Interface
export STORAGE_LOCATION=/anything-llm && \
mkdir -p $STORAGE_LOCATION && \
touch "$STORAGE_LOCATION/.env" && \
chown -R 1000:1000 $STORAGE_LOCATION && \
podman run \
  -d \
  -p 127.0.0.1:3001:3001 \
  --name anything-llm \
  --network localai \
  --cap-add SYS_ADMIN \
  -v ${STORAGE_LOCATION}:/app/server/storage \
  -v ${STORAGE_LOCATION}/.env:/app/server/.env \
  -v anything-llm:/app/server \
  -e STORAGE_DIR="/app/server/storage" \
  mintplexlabs/anythingllm
```
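With both containers running, a quick sanity check is to hit Ollama's HTTP API and confirm Anything LLM answers on its published port:

```bash
# List the models Ollama has pulled via its HTTP API
curl http://127.0.0.1:11434/api/tags

# The Anything LLM web UI should respond on the published port
curl -I http://127.0.0.1:3001
```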
### Quadlets with Podlet
```bash
# Create volume for ollama
mkdir /ollama

podman run --rm ghcr.io/containers/podlet --install --description "Local AI Network" \
  podman network create localai

podman run --rm ghcr.io/containers/podlet --install --description "Ollama" \
  podman run \
    -d \
    -v /ollama:/root/.ollama \
    -p 127.0.0.1:11434:11434 \
    --network localai \
    --name ollama \
    docker.io/ollama/ollama

export STORAGE_LOCATION=/anything-llm && \
podman run --rm ghcr.io/containers/podlet --install --description "Anything LLM" \
  podman run \
    -d \
    -p 127.0.0.1:3001:3001 \
    --name anything-llm \
    --network localai \
    --cap-add SYS_ADMIN \
    -v ${STORAGE_LOCATION}:/app/server/storage \
    -v ${STORAGE_LOCATION}/.env:/app/server/.env \
    -v anything-llm:/app/server \
    -e STORAGE_DIR="/app/server/storage" \
    mintplexlabs/anythingllm
```
Podlet's `--install` flag adds an `[Install]` section to the generated services to have them autostart.
Put the generated files in `/usr/share/containers/systemd/`.
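As a rough sketch of what such a quadlet might look like (assuming podlet also produced a `localai.network` unit; the field names follow the quadlet format, but the exact generated output may differ), an `ollama.container` file in that directory is picked up on the next daemon reload:

```bash
# Illustrative only: a hand-written quadlet roughly matching the podman run above;
# prefer the podlet-generated output.
cat > /usr/share/containers/systemd/ollama.container <<'EOF'
[Unit]
Description=Ollama

[Container]
Image=docker.io/ollama/ollama
ContainerName=ollama
Network=localai.network
PublishPort=127.0.0.1:11434:11434
Volume=/ollama:/root/.ollama

[Install]
WantedBy=multi-user.target
EOF

systemctl daemon-reload
systemctl start ollama.service
```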
## Models

Example configs can be found here:

<https://github.com/mudler/LocalAI/tree/9099d0c77e9e52f4a63c53aa546cc47f1e0cfdb1/gallery>

### Config
```yaml
name: llama-3.2
parameters:
  model: huggingface/Llama-3.2-3B-Instruct-f16.gguf
  temperature: 0.6
backend: llama-cpp
# Default context size
context_size: 8192
threads: 16
```
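A config like this normally sits in LocalAI's models directory, and the `name` field becomes the model id on the OpenAI-compatible API. A minimal request, assuming LocalAI is listening on its default port 8080, might look like:

```bash
# Hypothetical request against LocalAI's OpenAI-compatible endpoint,
# using the model name from the config above
curl http://127.0.0.1:8080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "llama-3.2", "messages": [{"role": "user", "content": "Hello"}]}'
```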
### Chat

llama-3.2-3b-instruct:q8_0

### Code

<https://huggingface.co/bartowski/Codestral-22B-v0.1-GGUF/tree/main>

### Agent

llama-3.2-3b-instruct:q8_0

## Podman systemd service

See [generating AWS credentials](cloud/graduated/aws_iam/README.md)
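Assuming the quadlet shown earlier generated a unit named after its `.container` file, a minimal way to confirm the service came up:

```bash
# Unit name assumes the ollama.container quadlet shown earlier
systemctl status ollama.service
journalctl -u ollama.service --since "15 min ago"
```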