This commit is contained in:
James Turland 2023-12-19 17:10:40 +00:00
parent 1c7a3f7a60
commit b3ba3f8790
2 changed files with 49 additions and 0 deletions

44
Ollama/docker-compose.yml Normal file
View file

@ -0,0 +1,44 @@
# Compose stack: Ollama LLM server + Ollama Web UI frontend.
# NOTE(review): indentation was reconstructed from a flattened paste — nesting
# follows the standard Compose schema (services / build / volumes).
version: '3.6'

services:
  ollama:
    # Uncomment below for GPU support (requires NVIDIA Container Toolkit)
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: 1
    #           capabilities:
    #             - gpu
    volumes:
      # Persist downloaded models across container recreation
      - ollama:/root/.ollama
    # Uncomment below to expose Ollama API outside the container stack
    # ports:
    #   - "11434:11434"
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:latest

  ollama-webui:
    build:
      context: .
      args:
        # Path the UI uses to reach the API when proxied through the webui
        OLLAMA_API_BASE_URL: '/ollama/api'
      dockerfile: Dockerfile
    image: ollama-webui:latest
    container_name: ollama-webui
    depends_on:
      - ollama
    ports:
      # Quoted to avoid YAML 1.1 implicit-typing surprises on port mappings
      - "3000:8080"
    environment:
      # webui reaches the ollama service over the Compose network by service name
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped

volumes:
  # Named volume backing the ollama model store
  ollama: {}

5
Ollama/readme.md Normal file
View file

@ -0,0 +1,5 @@
1. Clone the repo from: https://github.com/ollama-webui/ollama-webui
2. Tweak the docker-compose to your liking
3. Run the container: sudo docker compose up -d
4. Let it build :)