# LEM Desktop — Docker Compose Stack
#
# Provides local Forgejo (agentic git), InfluxDB (metrics), and inference proxy.
#
# Usage:
#   lem desktop start                                  # starts all services
#   docker compose -f deploy/docker-compose.yml up -d
#
# Services:
#   forgejo   — Local git forge for agentic workflows (port 3000, SSH 2222)
#   influxdb  — Metrics and coordination (port 8181)
#   inference — OpenAI-compatible proxy to M3 MLX or local vLLM (port 8080)
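
# Quick smoke test once the stack is up (endpoints mirror the healthchecks
# below; the /v1/models route assumes the upstream speaks the OpenAI API):
#   curl http://localhost:3000/api/v1/version   # Forgejo
#   curl http://localhost:8181/health           # InfluxDB
#   curl http://localhost:8080/v1/models        # inference proxy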

services:
  # ── Forgejo — Local Agentic Git Forge ──
  forgejo:
    image: codeberg.org/forgejo/forgejo:10
    container_name: lem-forgejo
    restart: unless-stopped
    ports:
      - "3000:3000"  # Web UI
      - "2222:22"    # SSH
    volumes:
      - forgejo-data:/data
      - forgejo-config:/etc/gitea
    environment:
      - USER_UID=1000
      - USER_GID=1000
      - FORGEJO__server__ROOT_URL=http://localhost:3000/
      - FORGEJO__server__SSH_PORT=2222
      - FORGEJO__server__SSH_LISTEN_PORT=22
      - FORGEJO__service__DISABLE_REGISTRATION=false
      - FORGEJO__service__DEFAULT_ALLOW_CREATE_ORGANIZATION=true
      - FORGEJO__federation__ENABLED=true
      - FORGEJO__actions__ENABLED=true
      - FORGEJO__database__DB_TYPE=sqlite3
      - FORGEJO__database__PATH=/data/gitea/gitea.db
    healthcheck:
      test: ["CMD", "curl", "-fsSL", "http://localhost:3000/api/v1/version"]
      interval: 30s
      timeout: 5s
      retries: 3
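
  # Agents can reach Forgejo over either transport once it is healthy.
  # A hedged example (the "agent/workspace" repo is illustrative; nothing
  # in this stack creates it):
  #   git clone http://localhost:3000/agent/workspace.git
  #   git clone ssh://git@localhost:2222/agent/workspace.git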

  # ── InfluxDB v3 — Metrics & Coordination ──
  influxdb:
    image: quay.io/influxdb/influxdb3-core:latest
    container_name: lem-influxdb
    restart: unless-stopped
    ports:
      - "8181:8181"
    volumes:
      - influxdb-data:/var/lib/influxdb3
    environment:
      # Node id comes from the environment; recent influxdb3 builds call
      # the flag --node-id (early alphas used --host-id), so relying on
      # the env var avoids pinning either flag name in the command.
      - INFLUXDB3_NODE_ID=lem-local
    command: ["serve", "--object-store", "file", "--data-dir", "/var/lib/influxdb3"]
    healthcheck:
      test: ["CMD", "curl", "-fsSL", "http://localhost:8181/health"]
      interval: 15s
      timeout: 5s
      retries: 5
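
  # Smoke test against the v3 HTTP API (a sketch; the "lem" database and
  # "scores" measurement are illustrative names):
  #   curl -X POST "http://localhost:8181/api/v3/write_lp?db=lem" \
  #     --data-raw "scores,agent=alpha value=0.92"
  #   curl "http://localhost:8181/api/v3/query_sql?db=lem&q=SELECT+*+FROM+scores"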

  # ── Inference Proxy — OpenAI-Compatible API ──
  # Routes to M3 MLX server or local vLLM/llama.cpp.
  # Override LEM_INFERENCE_BACKEND to point elsewhere.
  inference:
    image: nginx:alpine
    container_name: lem-inference
    restart: unless-stopped
    ports:
      - "8080:8080"
    volumes:
      # Mounted as a template so the nginx image's envsubst entrypoint
      # expands ${UPSTREAM_URL} at startup; a file mounted straight into
      # conf.d would be served verbatim, with the variable never expanded.
      - ./inference-proxy.conf:/etc/nginx/templates/default.conf.template:ro
    environment:
      - UPSTREAM_URL=${LEM_INFERENCE_BACKEND:-http://10.69.69.108:8090}
    depends_on:
      - influxdb
    healthcheck:
      # nginx:alpine ships BusyBox wget but not curl, so probe with wget.
      test: ["CMD", "wget", "-qO-", "http://localhost:8080/health"]
      interval: 15s
      timeout: 5s
      retries: 3
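
  # A minimal sketch of inference-proxy.conf (the file itself is not part
  # of this view, so treat the body as an assumption). ${UPSTREAM_URL} is
  # the only token the template entrypoint expands here:
  #
  #   server {
  #     listen 8080;
  #     location /health { return 200 "ok\n"; }
  #     location / {
  #       proxy_pass ${UPSTREAM_URL};
  #       proxy_http_version 1.1;
  #       proxy_set_header Host $host;
  #     }
  #   }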

volumes:
  forgejo-data:
    driver: local
  forgejo-config:
    driver: local
  influxdb-data:
    driver: local
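
# To point the proxy at a different backend, set LEM_INFERENCE_BACKEND
# before bringing the stack up (the Ollama-style URL is illustrative):
#   LEM_INFERENCE_BACKEND=http://host.docker.internal:11434 \
#     docker compose -f deploy/docker-compose.yml up -d inference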