fix(demo): migrate Reactive Resume to SeaweedFS, fix Kiwix/Apple Health

- Replace MinIO + Chrome with SeaweedFS (S3) + bucket init container
- Update Reactive Resume to v5 config (S3_* env vars, APP_URL, AUTH_SECRET)
- Fix Kiwix: smaller ZIM download, graceful fallback on failure, start_period
- Fix Apple Health: use InfluxDB ping() instead of deprecated ready()
- Remove stale RESUME_CHROME_TOKEN and RESUME_REFRESH_TOKEN_SECRET
- Add .yamllint config to relax line-length for compose template
- Update validate-all.sh to use local yamllint config and new image refs
- Update unit tests for createbucket service (replaces chrome)

💘 Generated with Crush

Assisted-by: GLM-5.1 via Crush <crush@charm.land>
This commit is contained in:
2026-05-08 14:22:57 -05:00
parent ad59acbc28
commit b286b0a305
7 changed files with 79 additions and 72 deletions

View File

@@ -617,54 +617,50 @@ services:
timeout: ${HEALTH_CHECK_TIMEOUT}
retries: 5
# Reactive Resume - Minio Storage
# Reactive Resume - SeaweedFS (S3 Storage)
reactiveresume-minio:
image: minio/minio
image: chrislusf/seaweedfs:latest
container_name: "${COMPOSE_PROJECT_NAME}-reactiveresume-minio"
restart: unless-stopped
command: server /data
command: server -s3 -filer -dir=/data -ip=0.0.0.0
networks:
- ${COMPOSE_NETWORK_NAME}
ports:
- "${RESUME_MINIO_PORT}:9000"
- "${RESUME_MINIO_PORT}:8333"
volumes:
- ${COMPOSE_PROJECT_NAME}_reactiveresume_minio_data:/data
environment:
MINIO_ROOT_USER: ${RESUME_MINIO_USER}
MINIO_ROOT_PASSWORD: ${RESUME_MINIO_PASSWORD}
AWS_ACCESS_KEY_ID: ${RESUME_MINIO_USER}
AWS_SECRET_ACCESS_KEY: ${RESUME_MINIO_PASSWORD}
deploy:
resources:
limits:
memory: 256M
healthcheck:
test: ["CMD", "curl", "-f", "--silent", "http://localhost:9000/minio/health/live"]
test: ["CMD", "wget", "-q", "-O", "/dev/null", "http://localhost:8888"]
interval: ${HEALTH_CHECK_INTERVAL}
timeout: ${HEALTH_CHECK_TIMEOUT}
retries: ${HEALTH_CHECK_RETRIES}
start_period: 10s
# Reactive Resume - Chrome (PDF Generation)
reactiveresume-chrome:
image: ghcr.io/browserless/chromium:latest
container_name: "${COMPOSE_PROJECT_NAME}-reactiveresume-chrome"
restart: unless-stopped
# Reactive Resume - Create S3 Bucket
reactiveresume-createbucket:
image: quay.io/minio/mc:latest
container_name: "${COMPOSE_PROJECT_NAME}-reactiveresume-createbucket"
restart: on-failure
networks:
- ${COMPOSE_NETWORK_NAME}
environment:
TIMEOUT: 10000
CONCURRENT: 10
TOKEN: ${RESUME_CHROME_TOKEN}
EXIT_ON_HEALTH_FAILURE: true
PRE_REQUEST_HEALTH_CHECK: true
deploy:
resources:
limits:
memory: 512M
healthcheck:
test: ["CMD", "curl", "-f", "--silent", "http://localhost:3000/health"]
interval: ${HEALTH_CHECK_INTERVAL}
timeout: ${HEALTH_CHECK_TIMEOUT}
retries: ${HEALTH_CHECK_RETRIES}
start_period: 30s
entrypoint:
  - /bin/sh
  - -c
  - |
    # The healthcheck on reactiveresume-minio probes the filer (8888), so the
    # S3 gateway (8333) may lag slightly behind — retry the alias until it
    # answers instead of relying on a fixed sleep.
    until mc alias set seaweedfs http://reactiveresume-minio:8333 ${RESUME_MINIO_USER} ${RESUME_MINIO_PASSWORD}; do
      echo 'Waiting for SeaweedFS S3 endpoint...'
      sleep 2
    done
    # --ignore-existing makes re-runs idempotent (the data volume persists the
    # bucket). Let mc's exit status propagate: a real failure triggers
    # `restart: on-failure` and correctly blocks the app service, which waits
    # on `service_completed_successfully`.
    mc mb --ignore-existing seaweedfs/reactive-resume
depends_on:
reactiveresume-minio:
condition: service_healthy
# Reactive Resume - Resume Builder
reactiveresume-app:
@@ -679,29 +675,20 @@ services:
reactiveresume-postgres:
condition: service_healthy
reactiveresume-minio:
condition: service_started
reactiveresume-chrome:
condition: service_started
condition: service_healthy
reactiveresume-createbucket:
condition: service_completed_successfully
environment:
PORT: 3000
NODE_ENV: production
PUBLIC_URL: http://localhost:${REACTIVE_RESUME_PORT}
STORAGE_URL: http://localhost:${RESUME_MINIO_PORT}/default
CHROME_TOKEN: ${RESUME_CHROME_TOKEN}
CHROME_URL: ws://reactiveresume-chrome:3000
APP_URL: http://localhost:${REACTIVE_RESUME_PORT}
DATABASE_URL: postgresql://${RESUME_POSTGRES_USER}:${RESUME_POSTGRES_PASSWORD}@reactiveresume-postgres:5432/${RESUME_POSTGRES_DB}
ACCESS_TOKEN_SECRET: ${RESUME_ACCESS_TOKEN_SECRET}
REFRESH_TOKEN_SECRET: ${RESUME_REFRESH_TOKEN_SECRET}
MAIL_FROM: noreply@localhost
STORAGE_ENDPOINT: reactiveresume-minio
STORAGE_PORT: 9000
STORAGE_REGION: us-east-1
STORAGE_BUCKET: default
STORAGE_ACCESS_KEY: ${RESUME_MINIO_USER}
STORAGE_SECRET_KEY: ${RESUME_MINIO_PASSWORD}
STORAGE_USE_SSL: "false"
STORAGE_SKIP_BUCKET_CHECK: "false"
AUTH_SECRET: ${RESUME_ACCESS_TOKEN_SECRET}
S3_ACCESS_KEY_ID: ${RESUME_MINIO_USER}
S3_SECRET_ACCESS_KEY: ${RESUME_MINIO_PASSWORD}
S3_ENDPOINT: http://reactiveresume-minio:8333
S3_BUCKET: reactive-resume
S3_FORCE_PATH_STYLE: "true"
labels:
homepage.group: "Productivity"
homepage.name: "Reactive Resume"
@@ -713,7 +700,7 @@ services:
limits:
memory: 512M
healthcheck:
test: ["CMD", "curl", "-f", "--silent", "http://localhost:3000/api/health"]
test: ["CMD", "node", "-e", "fetch('http://127.0.0.1:3000/api/health').then((r) => { if (!r.ok) process.exit(1); }).catch(() => process.exit(1));"]
interval: ${HEALTH_CHECK_INTERVAL}
timeout: ${HEALTH_CHECK_TIMEOUT}
retries: 5
@@ -763,14 +750,23 @@ services:
- "${KIWIX_PORT}:8080"
volumes:
- ${COMPOSE_PROJECT_NAME}_kiwix_data:/data
command: >
sh -c "
if [ -z \"$$(ls -A /data/*.zim 2>/dev/null)\" ]; then
echo 'No ZIM files found. Downloading Wikipedia Medical Encyclopedia...';
wget -q -O /data/wikipedia_en_medicine_maxi.zim 'https://download.kiwix.org/zim/wikipedia/wikipedia_en_medicine_maxi.zim' || echo 'Download failed - Kiwix will serve empty';
entrypoint: []
command:
  - /bin/sh
  - -c
  - |
    # First run only: the data volume persists any downloaded ZIM.
    if ! ls /data/*.zim 1>/dev/null 2>&1; then
      echo 'No ZIM files found. Downloading sample ZIM...';
      # Backslash continuations are required: each unescaped line in this
      # literal block is a separate shell command, so a bare URL line would
      # never reach wget. On failure, remove the zero-byte file wget -O
      # creates so the guard below (and the next start) sees no ZIM.
      wget -q -O /data/demo.zim \
        'https://download.kiwix.org/zim/other/bleedingedge_climate-change_en.zim' \
        || { echo 'Download failed'; rm -f /data/demo.zim; };
    fi
    if ls /data/*.zim 1>/dev/null 2>&1; then
      exec kiwix-serve /data/*.zim
    else
      # Graceful fallback: keep the container alive so compose doesn't churn.
      echo 'No ZIM files available, sleeping indefinitely'
      exec sleep infinity
    fi
kiwix-serve /data/*.zim
"
environment:
- PUID=${DEMO_UID}
- PGID=${DEMO_GID}
@@ -788,7 +784,8 @@ services:
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8080"]
interval: ${HEALTH_CHECK_INTERVAL}
timeout: ${HEALTH_CHECK_TIMEOUT}
retries: ${HEALTH_CHECK_RETRIES}
retries: 5
start_period: 120s
# Resume Matcher - AI Resume Screening
resumematcher: