TSYSDevStack/SupportStack/demo/docker-compose.yml

---
# TSYS Developer Support Stack - Docker Compose Configuration for Demo
#
# NOTE: This demo configuration defines no persistent storage: no named volumes are
# declared, so every service keeps its data inside the container filesystem and all
# data is lost when the stack is removed (e.g. 'docker compose down').
# The only host mounts are the read-only Homepage config directory and the
# read-only Docker socket (exposed to other services via the socket proxy).
# This is intentional for the demo environment to ensure no persistent data accumulation.
#
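# Typical demo lifecycle (standard Docker Compose commands):
#   docker compose up -d    # start the stack
#   docker compose down     # stop and remove it (all data is discarded)
# The ${...} variables used below (ports, PUID/PGID, DEMO_PASSWORD, ...) are assumed
# to be provided by an accompanying .env file next to this compose file; adjust the
# names and values there to suit your environment.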
services:
  # Homepage - Developer dashboard
  homepage:
    image: ghcr.io/gethomepage/homepage:latest
    container_name: tsysdevstack-supportstack-homepage
    ports:
      - "${HOMEPAGE_PORT}:3000"
    volumes:
      - ./config:/app/config:ro,Z # Demo: read-only config mount for functionality
    user: "${PUID}:${PGID}" # Run as non-root to prevent host file ownership issues
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=Homepage"
      - "homepage.icon=homepage"
      - "homepage.href=http://192.168.3.6:4000"
      - "homepage.description=Developer dashboard and service discovery"
    environment:
      DOCKER_HOST: >-
        tcp://tsysdevstack-supportstack-docker-socket-proxy:${DOCKER_PROXY_PORT}
      HOMEPAGE_ALLOWED_HOSTS: "*"
      HOMEPAGE_VAR_DOCKER_HOST: >-
        tcp://tsysdevstack-supportstack-docker-socket-proxy:${DOCKER_PROXY_PORT}
    # Use docker.yaml with explicit include/exclude filtering
    depends_on:
      - tsysdevstack-supportstack-docker-socket-proxy
    restart: unless-stopped
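  # Homepage discovers the other containers through the Docker API, but only via the
  # read-only socket proxy defined later in this file (DOCKER_HOST); the Docker
  # socket itself is never mounted into this container.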
  # Atuin - Shell history
  atuin:
    image: ghcr.io/atuinsh/atuin:latest
    container_name: tsysdevstack-supportstack-atuin
    ports:
      - "${ATUIN_PORT}:8888"
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=Atuin"
      - "homepage.icon=atuin"
      - "homepage.href=http://192.168.3.6:4001"
      - "homepage.description=Synced shell history database"
    environment:
      ATUIN_HOST: "0.0.0.0"
      ATUIN_PORT: "8888"
      ATUIN_OPEN_REGISTRATION: "true"
      ATUIN_DB_URI: >-
        postgres://atuin:${DEMO_PASSWORD}@tsysdevstack-supportstack-postgres:5432/atuin
      RUST_LOG: "debug,atuin_server=debug,sqlx=debug"
    command: server start
    # No persistent volumes for demo - config is ephemeral
    user: "${PUID}:${PGID}"
    depends_on:
      tsysdevstack-supportstack-postgres:
        condition: service_healthy
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:8888/ || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
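  # A shell client can sync against this server by pointing its sync_address at
  # http://<host>:${ATUIN_PORT} (e.g. in ~/.config/atuin/config.toml) and then
  # registering with 'atuin register'; exact client steps may vary by version.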
  # Wakapi - Time tracking
  wakapi:
    image: ghcr.io/muety/wakapi:latest
    container_name: tsysdevstack-supportstack-wakapi
    ports:
      - "${WAKAPI_PORT}:3000"
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=Wakapi"
      - "homepage.icon=wakapi"
      - "homepage.href=http://192.168.3.6:4002"
      - "homepage.description=Self-hosted time tracking"
    environment:
      - WAKAPI_PASSWORD_SALT=demo_password
    # No persistent volumes for demo - data is ephemeral
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
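  # WakaTime-compatible editor plugins can report to this instance by setting their
  # API URL (api_url in ~/.wakatime.cfg) to http://<host>:${WAKAPI_PORT}/api; see the
  # Wakapi documentation for exact client configuration.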
  # MailHog - Email testing
  mailhog:
    image: mailhog/mailhog:latest
    container_name: tsysdevstack-supportstack-mailhog
    ports:
      - "1025:1025"
      - "${MAILHOG_PORT}:8025"
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=MailHog"
      - "homepage.icon=mailhog"
      - "homepage.href=http://192.168.3.6:4005"
      - "homepage.description=Email testing service"
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "echo > /dev/tcp/localhost/8025"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 128M
          cpus: '0.25'
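  # Applications under test can send mail via SMTP to <host>:1025; captured messages
  # are then viewable in the MailHog web UI on port ${MAILHOG_PORT}.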
  # Pi-hole - DNS management
  pihole:
    image: pihole/pihole:latest
    container_name: tsysdevstack-supportstack-pihole
    # Note: Pi-hole requires root for DNS capabilities and file permissions
    ports:
      - "${PIHOLE_PORT}:80"
      - "53:53/tcp"
      - "53:53/udp"
      - "67:67/udp"
    labels:
      - "homepage.group=Infrastructure"
      - "homepage.name=Pi-hole"
      - "homepage.icon=pihole"
      - "homepage.href=http://192.168.3.6:4006"
      - "homepage.description=DNS-based ad blocking and network monitoring"
    environment:
      - TZ=UTC
      - WEBPASSWORD=${DEMO_PASSWORD}
      - PIHOLE_DNS_=1.1.1.1;1.0.0.1
      - DNSMASQ_LISTENING=all
      - WEBTHEME=default-dark
    # No persistent volumes for demo - configuration is ephemeral
    # Pi-hole requires root for DNS functionality
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "echo > /dev/tcp/localhost/80"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s
    deploy:
      resources:
        limits:
          memory: 512M
          cpus: '0.75'
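  # NOTE: binding port 53 on the host can conflict with a local resolver such as
  # systemd-resolved; free the port or adjust the mapping if Pi-hole fails to start.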
  # Portainer - Container management
  portainer:
    image: portainer/portainer-ce:latest
    container_name: tsysdevstack-supportstack-portainer
    ports:
      - "${PORTAINER_PORT}:9000"
    labels:
      - "homepage.group=Infrastructure"
      - "homepage.name=Portainer"
      - "homepage.icon=portainer"
      - "homepage.href=http://192.168.3.6:4007"
      - "homepage.description=Container management interface"
    # No persistent volumes for demo - data is ephemeral
    # Note: Portainer needs root for data directory permissions
    environment:
      DOCKER_HOST: >-
        tcp://tsysdevstack-supportstack-docker-socket-proxy:${DOCKER_PROXY_PORT}
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "echo > /dev/tcp/localhost/9000"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
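  # Portainer has no direct socket mount; it is expected to reach the Docker API
  # through DOCKER_HOST (the socket proxy), and because the proxy denies
  # POST/PUT/DELETE this instance is effectively read-only in the demo.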
  # ArchiveBox - Web archiving
  archivebox:
    image: archivebox/archivebox:latest
    container_name: tsysdevstack-supportstack-archivebox
    ports:
      - "${ARCHIVEBOX_PORT}:8000"
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=ArchiveBox"
      - "homepage.icon=archivebox"
      - "homepage.href=http://192.168.3.6:4003"
      - "homepage.description=Self-hosted web archiving"
    environment:
      - ALLOWED_HOSTS=*
      - MEDIA_MAX_SIZE=750m
    # No persistent volumes for demo - data is ephemeral
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:8000/ || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
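  # Pages can be archived from the web UI, or (as a sketch) from the host with:
  #   docker compose exec archivebox archivebox add 'https://example.com'
  # Anything archived is discarded with the container since no volume is mounted.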
  # InfluxDB - Time series database
  influxdb:
    image: influxdb:2.7-alpine
    container_name: tsysdevstack-supportstack-influxdb
    ports:
      - "${INFLUXDB_PORT}:8086"
    labels:
      - "homepage.group=Monitoring"
      - "homepage.name=InfluxDB"
      - "homepage.icon=influxdb"
      - "homepage.href=http://192.168.3.6:4008"
      - "homepage.description=Time series database for metrics storage"
    environment:
      - DOCKER_INFLUXDB_INIT_MODE=setup
      - DOCKER_INFLUXDB_INIT_USERNAME=admin
      - DOCKER_INFLUXDB_INIT_PASSWORD=${DEMO_PASSWORD}
      - DOCKER_INFLUXDB_INIT_ORG=tsysdev
      - DOCKER_INFLUXDB_INIT_BUCKET=metrics
    # No persistent volumes for demo - data is ephemeral
    user: "${PUID}:${PGID}"
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:8086/ping || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
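  # The DOCKER_INFLUXDB_INIT_* setup only runs against an empty data directory; with
  # no volume mounted, every recreation of this container yields a fresh instance
  # (including a newly auto-generated admin API token).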
  # Grafana - Analytics visualization
  grafana:
    image: grafana/grafana:latest
    container_name: tsysdevstack-supportstack-grafana
    ports:
      - "${GRAFANA_PORT}:3000"
    labels:
      - "homepage.group=Monitoring"
      - "homepage.name=Grafana"
      - "homepage.icon=grafana"
      - "homepage.href=http://192.168.3.6:4009"
      - "homepage.description=Analytics and visualization platform"
    environment:
      - GF_SECURITY_ADMIN_USER=admin
      - GF_SECURITY_ADMIN_PASSWORD=${DEMO_PASSWORD}
      - GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource
      - GF_SERVER_DOMAIN=192.168.3.6:4009
      - GF_SERVER_ROOT_URL=http://192.168.3.6:4009
    # No persistent volumes for demo - data is ephemeral
    user: "${PUID}:${PGID}"
    depends_on:
      - influxdb
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "echo > /dev/tcp/localhost/3000"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 512M
          cpus: '0.75'
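  # No provisioning files are mounted, so the InfluxDB data source must be added
  # manually in the Grafana UI, e.g. pointing at
  # http://tsysdevstack-supportstack-influxdb:8086 on the compose network.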
  # Draw.io - Diagramming
  drawio:
    image: jgraph/drawio:latest
    container_name: tsysdevstack-supportstack-drawio
    user: "${PUID}:${PGID}"
    ports:
      - "${DRAWIO_PORT}:8080"
    labels:
      - "homepage.group=Documentation"
      - "homepage.name=Draw.io"
      - "homepage.icon=diagram"
      - "homepage.href=http://192.168.3.6:4010"
      - "homepage.description=Web-based diagramming tool"
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "echo > /dev/tcp/localhost/8080"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
  # Tube Archivist - YouTube media archiving
  tube-archivist:
    image: bbilly1/tubearchivist:latest
    container_name: tsysdevstack-supportstack-tube-archivist
    ports:
      - "4004:8000"
    labels:
      - "homepage.group=Developer Tools"
      - "homepage.name=Tube Archivist"
      - "homepage.icon=youtube"
      - "homepage.href=http://192.168.3.6:4004"
      - "homepage.description=Self-hosted YouTube media archiving"
    environment:
      - TA_HOST=192.168.3.6:4004
      - TA_USERNAME=demo_user
      - TA_PASSWORD=demo_password
      - ES_URL=http://elastic:${DEMO_PASSWORD}@tsysdevstack-supportstack-elasticsearch:9200
      - ELASTIC_PASSWORD=${DEMO_PASSWORD}
      - REDIS_CON=redis://tsysdevstack-supportstack-redis:6379
      - TZ=UTC
    depends_on:
      tsysdevstack-supportstack-elasticsearch:
        condition: service_healthy
      tsysdevstack-supportstack-redis:
        condition: service_healthy
    # No persistent volumes for demo - cache and media are ephemeral
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "curl -f http://localhost:8000/ || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 120s
    deploy:
      resources:
        limits:
          memory: 512M
          cpus: '1.0'
  # Kroki - Diagrams as code
  kroki:
    image: yuzutech/kroki:latest
    container_name: tsysdevstack-supportstack-kroki
    user: "${PUID}:${PGID}"
    ports:
      - "${KROKI_PORT}:8000"
    labels:
      - "homepage.group=Documentation"
      - "homepage.name=Kroki"
      - "homepage.icon=diagram"
      - "homepage.href=http://192.168.3.6:4011"
      - "homepage.description=Service for converting text diagrams to images"
    environment:
      - KROKI_SAFE_MODE=0
      - KROKI_PLANTUML_ALLOW_LIST=*
      - KROKI_MERMAID_ALLOW_LIST=*
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:8000/health || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
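  # Example request against this instance (a sketch; substitute your host and the
  # ${KROKI_PORT} value from .env):
  #   curl -s -X POST -H 'Content-Type: text/plain' \
  #     --data-binary 'digraph G { hello -> world }' \
  #     http://<host>:<KROKI_PORT>/graphviz/svg
  # Kroki renders the posted diagram source and returns the image.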
  # Docker Socket Proxy - Secure Docker API access
  tsysdevstack-supportstack-docker-socket-proxy:
    image: tecnativa/docker-socket-proxy:latest
    container_name: tsysdevstack-supportstack-docker-socket-proxy
    ports:
      - "${DOCKER_PROXY_PORT}:2375"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
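    # Each flag below toggles an API section on the proxy: 1 grants access, 0 denies
    # it. With POST/PUT/DELETE disabled the proxy is read-only, which is why
    # publishing ${DOCKER_PROXY_PORT} on the host is tolerable for a demo (still
    # avoid exposing it on untrusted networks).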
    environment:
      - CONTAINERS=1
      - SERVICES=1
      - TASKS=1
      - NETWORKS=1
      - NODES=1
      - IMAGES=1
      - VOLUMES=1
      - EXEC=0
      - SECRETS=0
      - CONFIGS=0
      - PLUGINS=0
      - SYSTEM=0
      - INFO=1
      - VERSION=1
      - EVENTS=0
      - POST=0
      - DELETE=0
      - PUT=0
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - "wget --no-verbose --tries=1 --spider http://localhost:2375/version || exit 1"
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 60s
    deploy:
      resources:
        limits:
          memory: 128M
          cpus: '0.25'
  # PostgreSQL - Database for Atuin
  tsysdevstack-supportstack-postgres:
    image: postgres:15-alpine
    container_name: tsysdevstack-supportstack-postgres
    labels:
      - "homepage.group=Infrastructure"
      - "homepage.name=PostgreSQL"
      - "homepage.icon=postgresql"
      - "homepage.description=PostgreSQL database server"
    environment:
      - POSTGRES_DB=atuin
      - POSTGRES_USER=atuin
      - POSTGRES_PASSWORD=${DEMO_PASSWORD}
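    # NOTE: these credentials must match ATUIN_DB_URI in the atuin service above.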
    # No persistent volumes for demo - data is ephemeral
    restart: unless-stopped
    healthcheck:
      test:
        - CMD
        - pg_isready
        - -U
        - atuin
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 256M
          cpus: '0.5'
  # Elasticsearch - Search engine
  tsysdevstack-supportstack-elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.11.1
    container_name: tsysdevstack-supportstack-elasticsearch
    labels:
      - "homepage.group=Infrastructure"
      - "homepage.name=Elasticsearch"
      - "homepage.icon=elasticsearch"
      - "homepage.description=Search and analytics engine"
    environment:
      - "discovery.type=single-node"
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      - "xpack.security.enabled=true"
      - "ELASTIC_PASSWORD=${DEMO_PASSWORD}"
      - "path.repo=/usr/share/elasticsearch/data/snapshot"
    # No persistent volumes for demo - data is ephemeral
    user: "${PUID}:${PGID}"
    restart: unless-stopped
    healthcheck:
      test:
        - CMD-SHELL
        - >-
          curl -f -u "elastic:${DEMO_PASSWORD}"
          "http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=30s"
          || exit 1
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 120s
    deploy:
      resources:
        limits:
          memory: 1G
          cpus: '1.0'
  # Redis for Tube Archivist
  tsysdevstack-supportstack-redis:
    image: redis:7-alpine
    container_name: tsysdevstack-supportstack-redis
    labels:
      - "homepage.group=Infrastructure"
      - "homepage.name=Redis"
      - "homepage.icon=redis"
      - "homepage.description=In-memory data structure store"
    restart: unless-stopped
    command: redis-server --save 60 1 --stop-writes-on-bgsave-error no
    healthcheck:
      test:
        - CMD
        - redis-cli
        - ping
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s
    deploy:
      resources:
        limits:
          memory: 128M
          cpus: '0.25'
# No named volumes are defined - the demo is 100% ephemeral.
# The only bind mounts are the read-only Homepage config directory and the
# read-only Docker socket (consumed through the socket proxy).
networks:
  default:
    name: tsysdevstack_supportstack