-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
58 lines (54 loc) · 1.55 KB
/
docker-compose.yml
File metadata and controls
58 lines (54 loc) · 1.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
# Compose stack: Ollama LLM server + FastAPI-style backend + Vite frontend.
# NOTE: the top-level `version` attribute is obsolete in the Compose
# Specification (Compose v2 ignores it and warns), so it is omitted.
services:
  ollama:
    image: ollama/ollama:latest
    container_name: spatial-explorer-ollama
    environment:
      # Bind on all interfaces so other services on the Compose network
      # can reach Ollama via the `ollama` service name.
      - OLLAMA_HOST=0.0.0.0:11434
    ports:
      - "11434:11434"
    volumes:
      # Persist downloaded models across container restarts
      - ollama_models:/root/.ollama
    restart: unless-stopped
    # Ollama serves by default; model will auto-pull on first request

  backend:
    build:
      context: .
      dockerfile: Dockerfile.backend
    environment:
      # Optional external database; empty default keeps the service bootable.
      - DATABASE_URL=${DATABASE_URL:-}
      - DATASET_PATH=${DATASET_PATH:-/app/datasets/mouse_brain}
      # Route backend to Ollama service within Docker network
      - OLLAMA_URL=http://ollama:11434
      - OLLAMA_MODEL=mistral
      # ChatSpatial MCP (SSE) - still on host if running outside Docker
      - CHATSPATIAL_MCP_URL=${CHATSPATIAL_MCP_URL:-http://host.docker.internal:5005}
    ports:
      - "8000:8000"
    volumes:
      # Datasets mounted read-only; the container must not mutate them.
      - ./datasets:/app/datasets:ro
    depends_on:
      - ollama
    # Ensure host.docker.internal resolves on Linux for ChatSpatial
    extra_hosts:
      - "host.docker.internal:host-gateway"
    restart: unless-stopped

  frontend:
    build:
      context: .
      dockerfile: Dockerfile.frontend
    environment:
      # Use host-visible backend so browser can reach it outside the Docker network.
      - VITE_API_URL=http://localhost:8000/api
    ports:
      - "5173:5173"
    volumes:
      # Bind-mount source for live reload; anonymous volume shields the
      # container's node_modules from being masked by the host directory.
      - ./frontend:/app:cached
      - /app/node_modules
    depends_on:
      - backend
    restart: unless-stopped

volumes:
  # Named volume holding pulled Ollama models (mounted by the ollama service).
  # Removed the unused `ollama_data` volume: it was declared but never
  # mounted by any service.
  ollama_models: