Scaffold Cloudron packaging workspace
This commit is contained in:
@@ -1,83 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)
|
||||
CI_IMAGE=${CI_IMAGE:-knel/cloudron-ci}
|
||||
BUILD=${BUILD:-0}
|
||||
|
||||
ensure_image() {
    # Rebuild unconditionally when the caller requested it via BUILD=1.
    if [[ "${BUILD}" == "1" ]]; then
        docker build -t "${CI_IMAGE}" "${REPO_ROOT}/docker/ci-runner"
        return
    fi

    # Otherwise build lazily: only when the image is not already cached.
    docker image inspect "${CI_IMAGE}" >/dev/null 2>&1 \
        || docker build -t "${CI_IMAGE}" "${REPO_ROOT}/docker/ci-runner"
}
|
||||
|
||||
run_in_ci() {
    # Run a single shell command inside the CI harness container.
    # The repo is mounted at /workspace and the host Docker socket is
    # forwarded so the harness can drive docker builds itself.
    local cmd=$1

    docker run --rm -t \
        -v "${REPO_ROOT}:/workspace" \
        -v /var/run/docker.sock:/var/run/docker.sock \
        "${CI_IMAGE}" "${cmd}"
}
|
||||
|
||||
lint() {
    # Lint, regenerate the status dashboard, then fail if the dashboard
    # was regenerated but not committed (non-empty git diff).
    run_in_ci 'git config --global --add safe.directory /workspace && make lint && make status && git diff --exit-code docs/APP_STATUS.md'
}
|
||||
|
||||
packager_smoke() {
    # Smoke test: the helper packager image must build and the cloudron
    # CLI inside it must at least answer --help.
    run_in_ci 'git config --global --add safe.directory /workspace && docker build -t knel/cloudron-packager-test docker/packager && docker run --rm knel/cloudron-packager-test cloudron --help'
}
|
||||
|
||||
usage() {
|
||||
cat <<USAGE
|
||||
Usage: $(basename "$0") [lint|status|packager-smoke|all]
|
||||
|
||||
Options:
|
||||
lint Run make lint + status and verify dashboard is committed
|
||||
status Regenerate docs/APP_STATUS.md
|
||||
packager-smoke Build the helper packager image and run cloudron --help
|
||||
all Execute lint and packager-smoke
|
||||
|
||||
Environment variables:
|
||||
BUILD=1 Force rebuild of the CI image before running tasks
|
||||
CI_IMAGE Override the CI harness image tag (default: knel/cloudron-ci)
|
||||
USAGE
|
||||
}
|
||||
|
||||
status_only() {
    # Regenerate docs/APP_STATUS.md without running the full lint suite.
    run_in_ci 'git config --global --add safe.directory /workspace && make status'
}
|
||||
|
||||
main() {
    # The harness image must exist before any task (including --help,
    # which preserves the original behavior) is dispatched.
    ensure_image

    local target=${1:-all}
    case "${target}" in
        lint)
            lint
            ;;
        status)
            status_only
            ;;
        packager-smoke)
            packager_smoke
            ;;
        all)
            lint
            packager_smoke
            ;;
        -h|--help)
            usage
            ;;
        *)
            echo "Unknown target: ${target}" >&2
            usage
            exit 1
            ;;
    esac
}
|
||||
|
||||
main "$@"
|
@@ -1,8 +1,10 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Produce status documentation for all apps."""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import pathlib
|
||||
import re
|
||||
from datetime import datetime
|
||||
from typing import Dict, List
|
||||
|
||||
@@ -30,7 +32,7 @@ def detect_status(app_dir: pathlib.Path) -> str:
|
||||
placeholders = 0
|
||||
if "TODO" in json.dumps(manifest):
|
||||
placeholders += 1
|
||||
if "Replace start.sh" in start_script:
|
||||
if "not implemented" in start_script:
|
||||
placeholders += 1
|
||||
|
||||
if placeholders == 0:
|
||||
@@ -50,7 +52,27 @@ def render_table(rows: List[Dict[str, str]]) -> str:
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse command-line options for the status-document generator."""
    parser = argparse.ArgumentParser(description="Generate docs/APP_STATUS.md")
    parser.add_argument(
        "--preserve-timestamp",
        action="store_true",
        help="Reuse the existing timestamp if the status file already exists",
    )
    # Reads sys.argv; callers get a Namespace with .preserve_timestamp.
    return parser.parse_args()
|
||||
|
||||
|
||||
def extract_existing_timestamp(path: pathlib.Path) -> str | None:
|
||||
if not path.exists():
|
||||
return None
|
||||
match = re.search(r"_Updated: ([0-9T:-]+Z)_", path.read_text(encoding="utf-8"))
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
|
||||
def main() -> None:
|
||||
args = parse_args()
|
||||
catalog = load_catalog()
|
||||
rows: List[Dict[str, str]] = []
|
||||
for entry in catalog:
|
||||
@@ -65,7 +87,13 @@ def main() -> None:
|
||||
"issue": entry.get("issue", "")
|
||||
})
|
||||
|
||||
output = ["# Application Status", "", f"_Updated: {datetime.utcnow().isoformat(timespec='seconds')}Z_", "", render_table(rows)]
|
||||
timestamp = datetime.utcnow().isoformat(timespec='seconds') + "Z"
|
||||
if args.preserve_timestamp:
|
||||
existing = extract_existing_timestamp(ROOT / "docs" / "APP_STATUS.md")
|
||||
if existing:
|
||||
timestamp = existing
|
||||
|
||||
output = ["# Application Status", "", f"_Updated: {timestamp}_", "", render_table(rows)]
|
||||
status_path = ROOT / "docs" / "APP_STATUS.md"
|
||||
status_path.write_text("\n".join(output) + "\n", encoding="utf-8")
|
||||
print(f"Updated {status_path.relative_to(ROOT)}")
|
||||
|
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
set -euo pipefail

# Symlink the repository's managed git hooks into .git/hooks so the local
# CI checks run automatically on commit and push.
REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)
HOOK_DIR="${REPO_ROOT}/.git/hooks"

# Refuse to run outside a git checkout (worktrees without .git/hooks too).
if [[ ! -d "${HOOK_DIR}" ]]; then
    echo "This script must be run inside a git repository." >&2
    exit 1
fi

for hook in pre-commit post-commit pre-push; do
    # -f replaces any previously installed hook of the same name.
    ln -sf "${REPO_ROOT}/scripts/hooks/${hook}" "${HOOK_DIR}/${hook}"
    chmod +x "${HOOK_DIR}/${hook}"
    echo "Installed ${hook} hook"
done
|
@@ -1,11 +0,0 @@
|
||||
#!/bin/bash
set -euo pipefail

# Escape hatch: let developers bypass all local CI hooks.
if [[ "${SKIP_CI_HOOKS:-}" == "1" ]]; then
    echo "Skipping CI hooks because SKIP_CI_HOOKS=1"
    exit 0
fi

REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)

# FIX: quote the script path so checkouts whose path contains spaces or
# glob characters do not word-split the command.
# Best-effort (|| true): regenerating the status doc must never fail here.
"${REPO_ROOT}/scripts/ci_local.sh" status || true
|
@@ -1,11 +0,0 @@
|
||||
#!/bin/bash
set -euo pipefail

# Escape hatch: let developers bypass all local CI hooks.
if [[ "${SKIP_CI_HOOKS:-}" == "1" ]]; then
    echo "Skipping CI hooks because SKIP_CI_HOOKS=1"
    exit 0
fi

REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)

# FIX: quote the script path so checkouts whose path contains spaces or
# glob characters do not word-split the command. A lint failure aborts
# the commit (set -e).
"${REPO_ROOT}/scripts/ci_local.sh" lint
|
@@ -1,11 +0,0 @@
|
||||
#!/bin/bash
set -euo pipefail

# Escape hatch: let developers bypass all local CI hooks.
if [[ "${SKIP_CI_HOOKS:-}" == "1" ]]; then
    echo "Skipping CI hooks because SKIP_CI_HOOKS=1"
    exit 0
fi

REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)

# FIX: quote the script path so checkouts whose path contains spaces or
# glob characters do not word-split the command. A smoke-test failure
# aborts the push (set -e).
"${REPO_ROOT}/scripts/ci_local.sh" packager-smoke
|
@@ -1,96 +1,236 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Basic sanity checks for Cloudron packaging scaffolds."""
|
||||
"""Lint Cloudron app scaffolds."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from typing import Dict, List
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, Iterable, List, Sequence, Tuple
|
||||
|
||||
try:
|
||||
import jsonschema
|
||||
except ModuleNotFoundError as exc: # pragma: no cover - guidance for local execution
|
||||
raise SystemExit(
|
||||
"jsonschema is required. Run this script through './run/dev.sh python scripts/lint_repo.py ...' so dependencies come from the devtools container."
|
||||
) from exc
|
||||
|
||||
ROOT = pathlib.Path(__file__).resolve().parents[1]
|
||||
EXPECTED_BASE = os.environ.get("CLOUDRON_BASE", "cloudron/base:5.0.0")
|
||||
SCHEMA_PATH = ROOT / "schema" / "cloudron-manifest.schema.json"
|
||||
DEFAULT_BASE_IMAGE_PREFIX = "cloudron/base"
|
||||
|
||||
|
||||
def find_apps(apps_dir: pathlib.Path) -> List[pathlib.Path]:
|
||||
@dataclass
class Issue:
    """One lint finding, tagged with a severity of "error" or "warning"."""

    severity: str  # "error" blocks the lint run; "warning" only under --strict
    message: str   # human-readable description of the finding

    def __str__(self) -> str:
        # Console form, e.g. "ERROR: missing Dockerfile".
        label = self.severity.upper()
        return f"{label}: {self.message}"
|
||||
|
||||
|
||||
def load_schema() -> Dict[str, object]:
    """Load the Cloudron manifest JSON schema bundled with the repository."""
    return json.loads(SCHEMA_PATH.read_text(encoding="utf-8"))
|
||||
|
||||
|
||||
def list_apps(apps_dir: pathlib.Path) -> List[pathlib.Path]:
    """Return every app directory under *apps_dir*, sorted by path."""
    subdirs = [entry for entry in apps_dir.iterdir() if entry.is_dir()]
    return sorted(subdirs)
|
||||
|
||||
|
||||
def check_manifest(app_dir: pathlib.Path) -> List[str]:
|
||||
issues: List[str] = []
|
||||
manifest = app_dir / "CloudronManifest.json"
|
||||
if not manifest.exists():
|
||||
issues.append("missing CloudronManifest.json")
|
||||
return issues
|
||||
def resolve_slugs_from_paths(paths: Sequence[str]) -> List[str]:
    """Map changed file paths like ``apps/<slug>/...`` to sorted unique slugs.

    Paths outside the apps/ tree (or "apps" itself) are ignored.
    """
    found = set()
    for raw in paths:
        segments = pathlib.PurePosixPath(raw).parts
        if len(segments) >= 2 and segments[0] == "apps":
            found.add(segments[1])
    return sorted(found)
|
||||
|
||||
|
||||
def collect_paths_from_git(diff_target: str) -> List[str]:
    """List files changed relative to *diff_target* via ``git diff --name-only``.

    Best-effort by design: an empty target or a failing git invocation
    yields an empty list rather than raising.
    """
    if not diff_target:
        return []

    proc = subprocess.run(
        ["git", "diff", "--name-only", diff_target],
        cwd=ROOT,
        check=False,
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        return []

    names = (line.strip() for line in proc.stdout.splitlines())
    return [name for name in names if name]
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options for the scaffold linter."""
    parser = argparse.ArgumentParser(description="Lint Cloudron app scaffolds")
    # Slugs can come from three sources; select_slugs() merges them.
    parser.add_argument("--slug", action="append", dest="slugs", help="Limit linting to the provided slug")
    parser.add_argument("--path", action="append", dest="paths", help="Infer slugs from changed file paths")
    parser.add_argument("--git-diff", default=None, help="Infer paths from git diff target (e.g. HEAD)")
    parser.add_argument("--strict", action="store_true", help="Treat placeholder warnings as errors")
    parser.add_argument("--base-prefix", default=DEFAULT_BASE_IMAGE_PREFIX, help="Expected base image prefix")
    return parser.parse_args()
|
||||
|
||||
|
||||
def load_manifest(manifest_path: pathlib.Path, schema: Dict[str, object]) -> Tuple[Dict[str, object] | None, List[Issue]]:
    """Load and validate a CloudronManifest.json file.

    Returns ``(manifest, issues)``. The manifest is None when the file is
    missing or is not valid JSON. Schema violations and TODO placeholders
    are reported as issues while still returning the parsed data.
    """
    issues: List[Issue] = []
    if not manifest_path.exists():
        issues.append(Issue("error", "missing CloudronManifest.json"))
        return None, issues

    try:
        data = json.loads(manifest_path.read_text(encoding="utf-8"))
    except json.JSONDecodeError as exc:
        issues.append(Issue("error", f"manifest JSON invalid: {exc}"))
        return None, issues

    try:
        jsonschema.validate(data, schema)
    except jsonschema.ValidationError as exc:
        issues.append(Issue("error", f"manifest schema violation: {exc.message}"))

    # Placeholder text is a warning only: the scaffold works, but the
    # packaging metadata has not been filled in yet.
    if "TODO" in data.get("tagline", ""):
        issues.append(Issue("warning", "manifest tagline still contains TODO placeholder"))
    if "TODO" in data.get("description", ""):
        issues.append(Issue("warning", "manifest description still contains TODO placeholder"))

    return data, issues
|
||||
|
||||
|
||||
def parse_dockerfile(dockerfile: pathlib.Path) -> Tuple[str | None, List[str]]:
    """Determine the external image a (possibly multi-stage) Dockerfile ends on.

    Returns ``(final_image, errors)``; exactly one side is meaningful. Each
    FROM line is recorded per stage so aliased stages can be chased back to
    the base image via resolve_final_image().
    """
    import re

    if not dockerfile.exists():
        return None, ["missing Dockerfile"]

    stage_sources: Dict[str, str] = {}
    stage_order: List[str] = []
    final_reference: str | None = None

    # Accepts optional FROM flags (e.g. --platform=...) plus an optional alias.
    pattern = re.compile(
        r"^FROM\s+(?:(?:--[\w=/\-:.]+\s+)+)?(?P<image>[\w./:@+-]+)(?:\s+AS\s+(?P<alias>[A-Za-z0-9_\-\.]+))?",
        re.IGNORECASE,
    )

    for raw_line in dockerfile.read_text(encoding="utf-8").splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if not stripped.upper().startswith("FROM"):
            continue
        match = pattern.match(stripped)
        if not match:
            continue
        image = match.group("image")
        alias = match.group("alias")
        # Anonymous stages get a synthetic name so chains stay traceable.
        stage_name = alias or f"__stage_{len(stage_order)}"
        stage_sources[stage_name] = image
        stage_order.append(stage_name)
        final_reference = stage_name

    if final_reference is None:
        return None, ["Dockerfile does not define any FROM instructions"]

    return resolve_final_image(final_reference, stage_sources), []
|
||||
|
||||
|
||||
def check_start_script(app_dir: pathlib.Path) -> List[str]:
|
||||
issues: List[str] = []
|
||||
start = app_dir / "start.sh"
|
||||
if not start.exists():
|
||||
issues.append("missing start.sh")
|
||||
def resolve_final_image(stage: str, stage_sources: Dict[str, str]) -> str:
    """Follow FROM-stage aliases until reaching an external image name.

    A circular alias chain terminates at the first repeated reference
    instead of looping forever.
    """
    visited = set()
    cursor = stage
    while True:
        nxt = stage_sources.get(cursor)
        if nxt is None:
            # Not a known stage: the reference itself is the image.
            return cursor
        if nxt not in stage_sources:
            # Points at something that is not a stage -> external image.
            return nxt
        if nxt in visited:
            # Cycle guard: return the last reference seen.
            return nxt
        visited.add(nxt)
        cursor = nxt
|
||||
|
||||
|
||||
def lint_dockerfile(dockerfile: pathlib.Path, base_prefix: str) -> List[Issue]:
    """Validate that the Dockerfile exists and resolves to the expected base."""
    final_image, errors = parse_dockerfile(dockerfile)
    if errors:
        # Parsing problems are always blocking.
        return [Issue("error", err) for err in errors]

    assert final_image is not None
    issues: List[Issue] = []
    if not final_image.startswith(base_prefix):
        issues.append(
            Issue(
                "error",
                f"final Docker image '{final_image}' does not use expected base prefix '{base_prefix}'",
            )
        )

    # Scaffold convention: /app is handed to the cloudron user at build time.
    if "chown -R cloudron:cloudron" not in dockerfile.read_text(encoding="utf-8"):
        issues.append(Issue("warning", "Dockerfile missing chown step for /app"))
    return issues
|
||||
|
||||
|
||||
def main() -> int:
|
||||
def lint_start_script(start_script: pathlib.Path) -> List[Issue]:
    """Check that start.sh exists, is executable, and has no placeholder text."""
    if not start_script.exists():
        return [Issue("error", "missing start.sh")]

    findings: List[Issue] = []
    if not os.access(start_script, os.X_OK):
        findings.append(Issue("error", "start.sh is not executable"))
    if "not implemented" in start_script.read_text(encoding="utf-8"):
        findings.append(Issue("warning", "start.sh still contains not-implemented placeholder"))
    return findings
|
||||
|
||||
|
||||
def lint_app(app_dir: pathlib.Path, base_prefix: str, schema: Dict[str, object]) -> List[Issue]:
    """Run every per-app check (manifest, Dockerfile, start.sh) and merge results."""
    _, manifest_issues = load_manifest(app_dir / "CloudronManifest.json", schema)
    results: List[Issue] = list(manifest_issues)
    results.extend(lint_dockerfile(app_dir / "Dockerfile", base_prefix))
    results.extend(lint_start_script(app_dir / "start.sh"))
    return results
|
||||
|
||||
|
||||
def select_slugs(apps_dir: pathlib.Path, args: argparse.Namespace) -> List[str]:
    """Merge slug selections from --slug, --path, and --git-diff.

    With no filters at all, every directory under apps/ is selected.
    """
    requested = set(args.slugs or [])
    if args.paths:
        requested.update(resolve_slugs_from_paths(args.paths))
    if args.git_diff:
        requested.update(resolve_slugs_from_paths(collect_paths_from_git(args.git_diff)))
    if requested:
        return sorted(requested)
    # No explicit selection: lint the whole apps tree.
    return [path.name for path in list_apps(apps_dir)]
|
||||
|
||||
|
||||
def main() -> None:
|
||||
args = parse_args()
|
||||
apps_dir = ROOT / "apps"
|
||||
if not apps_dir.exists():
|
||||
print("No apps directory present", file=sys.stderr)
|
||||
return 1
|
||||
failures = 0
|
||||
for app_dir in find_apps(apps_dir):
|
||||
app_issues: List[str] = []
|
||||
app_issues.extend(check_manifest(app_dir))
|
||||
app_issues.extend(check_dockerfile(app_dir))
|
||||
app_issues.extend(check_start_script(app_dir))
|
||||
if app_issues:
|
||||
failures += 1
|
||||
print(f"[FAIL] {app_dir.relative_to(ROOT)}")
|
||||
for issue in app_issues:
|
||||
print(f" - {issue}")
|
||||
if failures:
|
||||
print(f"\n{failures} app(s) require updates", file=sys.stderr)
|
||||
return 2
|
||||
print("All apps passed lint checks")
|
||||
return 0
|
||||
schema = load_schema()
|
||||
|
||||
slugs = select_slugs(apps_dir, args)
|
||||
|
||||
hard_failures = 0
|
||||
for slug in slugs:
|
||||
app_dir = apps_dir / slug
|
||||
if not app_dir.exists():
|
||||
print(f"[SKIP] {slug}: directory does not exist")
|
||||
continue
|
||||
issues = lint_app(app_dir, args.base_prefix, schema)
|
||||
if not issues:
|
||||
print(f"[OK] {slug}")
|
||||
continue
|
||||
print(f"[ISSUES] {slug}")
|
||||
for issue in issues:
|
||||
print(f" - {issue}")
|
||||
for issue in issues:
|
||||
if issue.severity == "error" or (issue.severity == "warning" and args.strict):
|
||||
hard_failures += 1
|
||||
|
||||
if hard_failures:
|
||||
raise SystemExit(f"Lint failed with {hard_failures} blocking issue(s)")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
main()
|
||||
|
@@ -1,13 +1,21 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import datetime
|
||||
import datetime as dt
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
from typing import Dict, List
|
||||
from typing import Dict, Iterable, List, Sequence
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ModuleNotFoundError as exc: # pragma: no cover - guidance for local execution
|
||||
raise SystemExit(
|
||||
"Jinja2 is required. Run this script via './run/dev.sh python scripts/new_app.py ...' so dependencies are provided by the devtools container."
|
||||
) from exc
|
||||
|
||||
ROOT = pathlib.Path(__file__).resolve().parents[1]
|
||||
DEFAULT_BASE_IMAGE = "cloudron/base:5.0.0"
|
||||
DEFAULT_BUILDER_IMAGE = "cloudron/base:5.0.0"
|
||||
|
||||
|
||||
def load_catalog(path: pathlib.Path) -> List[Dict[str, object]]:
|
||||
@@ -16,109 +24,124 @@ def load_catalog(path: pathlib.Path) -> List[Dict[str, object]]:
|
||||
|
||||
|
||||
def build_app_id(slug: str) -> str:
    """Derive a reverse-DNS Cloudron app id (``com.knel.<normalized>``) from *slug*.

    Runs of non-alphanumeric characters collapse to single dots; a slug with
    no usable characters raises ValueError.
    """
    import re

    candidate = re.sub(r"[^a-z0-9]+", ".", slug.lower())
    candidate = re.sub(r"\.\.+", ".", candidate).strip(".")
    if not candidate:
        raise ValueError(f"Unable to derive Cloudron app id from slug '{slug}'")
    return f"com.knel.{candidate}"
|
||||
|
||||
|
||||
def default_tags() -> List[str]:
    """Fallback tag set used when a catalog entry defines no tags.

    Returns a fresh list each call so callers may mutate it safely.
    """
    return list(("custom", "known-element"))
|
||||
|
||||
|
||||
def default_placeholder_map(entry: Dict[str, object]) -> Dict[str, str]:
    """Build the ``{{PLACEHOLDER}}`` -> value map for template substitution.

    Required entry keys: slug, title, repo. Optional keys (issue, website,
    description, tags, httpPort, baseImage) fall back to sensible defaults.
    """
    slug = entry["slug"]
    title = entry["title"]
    repo = entry["repo"]
    issue = entry.get("issue", "")
    # BUG FIX: str.rstrip(".git") strips ANY trailing '.', 'g', 'i', 't'
    # characters (e.g. ".../audit.git" -> ".../audi"); removesuffix removes
    # the literal ".git" suffix exactly once.
    website = entry.get("website") or repo.removesuffix(".git")
    description = entry.get("description") or "TODO: Add package description."
    tags = entry.get("tags") or default_tags()
    http_port = str(entry.get("httpPort", 3000))
    base_image = entry.get("baseImage", "cloudron/base:5.0.0")

    placeholder_map = {
        "{{APP_ID}}": build_app_id(slug),
        "{{APP_TITLE}}": title,
        "{{APP_SLUG}}": slug,
        "{{APP_REPO}}": repo,
        "{{APP_ISSUE}}": issue,
        "{{APP_WEBSITE}}": website,
        "{{APP_DESCRIPTION}}": description,
        "{{HTTP_PORT}}": http_port,
        "{{BASE_IMAGE}}": base_image,
        "{{APP_TAGS}}": json.dumps(tags)
    }

    return placeholder_map
|
||||
|
||||
|
||||
def render_templates(template_dir: pathlib.Path, destination: pathlib.Path, entry: Dict[str, object]) -> None:
    """Copy the template tree into *destination* and substitute placeholders.

    Every file is rewritten with the ``{{PLACEHOLDER}}`` map, well-known
    scripts are made executable, and a metadata.json bookkeeping file is
    dropped into the scaffold.
    """
    shutil.copytree(template_dir, destination, dirs_exist_ok=True)

    placeholders = default_placeholder_map(entry)

    for path in destination.rglob('*'):
        if path.is_dir():
            continue
        text = path.read_text(encoding='utf-8')
        for key, value in placeholders.items():
            text = text.replace(key, value)
        path.write_text(text, encoding='utf-8')

    # Ensure critical scripts are executable
    for relpath in ["start.sh", "test/smoke.sh"]:
        target = destination / relpath
        if target.exists():
            mode = target.stat().st_mode
            target.chmod(mode | 0o111)

    # Drop metadata file for bookkeeping.
    # FIX: datetime.utcnow() is deprecated (Python 3.12+); use an aware UTC
    # time and drop tzinfo so the serialized string is byte-identical.
    created = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0, tzinfo=None).isoformat() + "Z"
    metadata = {
        "slug": entry["slug"],
        "title": entry["title"],
        "issue": entry.get("issue"),
        "repo": entry.get("repo"),
        "additionalRepos": entry.get("additionalRepos", []),
        "created": created,
        "notes": entry.get("notes", "TODO: capture packaging notes")
    }
    with (destination / "metadata.json").open("w", encoding="utf-8") as handle:
        json.dump(metadata, handle, indent=2)
|
||||
|
||||
|
||||
def ensure_clean_destination(app_dir: pathlib.Path, force: bool) -> None:
    """Guarantee *app_dir* exists as a fresh, empty directory.

    Raises FileExistsError when the directory already exists and *force*
    is False; with *force* the old tree is removed before recreation.
    """
    if app_dir.exists():
        if not force:
            raise FileExistsError(
                f"Destination {app_dir} already exists. Use --force to overwrite or remove it manually."
            )
        shutil.rmtree(app_dir)
    app_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def render_templates(template_dir: pathlib.Path, destination: pathlib.Path, context: Dict[str, object]) -> None:
    """Render the template tree into *destination*.

    ``*.j2`` files are rendered through Jinja2 (the suffix is dropped);
    everything else is copied verbatim. start.sh and test/smoke.sh are
    marked executable afterwards.
    """
    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(str(template_dir)),
        keep_trailing_newline=True,
        autoescape=False,
    )

    for source in template_dir.rglob("*"):
        if source.is_dir():
            continue
        relative = source.relative_to(template_dir)
        target = destination / relative

        if source.suffix == ".j2":
            # Render the template, writing output without the .j2 suffix.
            rendered_path = target.with_suffix("")
            rendered_path.parent.mkdir(parents=True, exist_ok=True)
            rendered = env.get_template(str(relative)).render(**context)
            rendered_path.write_text(rendered, encoding="utf-8")
        else:
            target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(source, target)

    # Mark the scaffold's executable scripts.
    for relpath in ("start.sh", "test/smoke.sh"):
        script = destination / relpath
        if script.exists():
            script.chmod(script.stat().st_mode | 0o111)
|
||||
|
||||
|
||||
def build_context(entry: Dict[str, object]) -> Dict[str, object]:
    """Assemble the Jinja2 rendering context for one catalog *entry*.

    Optional keys (tags, website, httpPort, baseImage, builderImage,
    additionalRepos, defaultVersion) fall back to defaults; the generated
    timestamp is UTC at second precision with a trailing ``Z``.
    """
    # FIX: utcnow() is deprecated (Python 3.12+); an aware UTC now with
    # tzinfo stripped serializes to exactly the same string.
    generated_at = dt.datetime.now(dt.timezone.utc).replace(microsecond=0, tzinfo=None).isoformat() + "Z"
    tags = entry.get("tags") or ["custom", "known-element"]
    # BUG FIX: str.rstrip(".git") strips ANY trailing '.', 'g', 'i', 't'
    # characters (e.g. ".../audit.git" -> ".../audi"); removesuffix removes
    # the literal ".git" suffix exactly once.
    website = entry.get("website") or entry.get("repo", "").removesuffix(".git")
    http_port = int(entry.get("httpPort", 3000))
    base_image = entry.get("baseImage", DEFAULT_BASE_IMAGE)
    builder_image = entry.get("builderImage", DEFAULT_BUILDER_IMAGE)
    additional_repos = entry.get("additionalRepos", [])

    return {
        "app_id": build_app_id(entry["slug"]),
        "app_slug": entry["slug"],
        "app_title": entry["title"],
        "app_repo": entry["repo"],
        "app_issue": entry.get("issue", ""),
        "app_website": website,
        "app_tags": json.dumps(tags),
        "http_port": http_port,
        "base_image": base_image,
        "builder_image": builder_image,
        "generated_at": generated_at,
        "additional_repos_json": json.dumps(additional_repos, indent=2),
        "default_app_version": entry.get("defaultVersion", "latest"),
    }
|
||||
|
||||
|
||||
def render_app(entry: Dict[str, object], template_dir: pathlib.Path, apps_dir: pathlib.Path, force: bool) -> pathlib.Path:
    """Render one catalog entry into ``apps/<slug>`` and return that directory."""
    destination = apps_dir / entry["slug"]
    ensure_clean_destination(destination, force)
    render_templates(template_dir, destination, build_context(entry))
    return destination
|
||||
|
||||
|
||||
def main() -> None:
|
||||
def iter_entries(catalog: Sequence[Dict[str, object]], slugs: Iterable[str] | None) -> Iterable[Dict[str, object]]:
|
||||
if slugs is None:
|
||||
yield from catalog
|
||||
return
|
||||
|
||||
slug_set = set(slugs)
|
||||
for entry in catalog:
|
||||
if entry["slug"] in slug_set:
|
||||
yield entry
|
||||
slug_set.remove(entry["slug"])
|
||||
|
||||
if slug_set:
|
||||
missing = ", ".join(sorted(slug_set))
|
||||
raise SystemExit(f"Unknown slug(s): {missing}")
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Parse CLI options for the scaffold generator."""
    parser = argparse.ArgumentParser(description="Scaffold Cloudron app packages from catalog entries")
    # --slug is repeatable; iter_entries() validates the collected set.
    parser.add_argument("--slug", action="append", dest="slugs", help="Generate scaffold for the provided slug (repeatable)")
    parser.add_argument("--catalog", default=str(ROOT / "apps" / "catalog.json"), help="Path to catalog JSON")
    parser.add_argument("--template", default=str(ROOT / "templates" / "cloudron-app"), help="Template directory")
    parser.add_argument("--apps-dir", default=str(ROOT / "apps"), help="Destination apps directory")
    parser.add_argument("--force", action="store_true", help="Overwrite existing app directory")
    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> None:
    """Render scaffolds for every selected catalog entry."""
    args = parse_args()

    catalog = load_catalog(pathlib.Path(args.catalog))

    template_dir = pathlib.Path(args.template)
    apps_dir = pathlib.Path(args.apps_dir)
    apps_dir.mkdir(parents=True, exist_ok=True)

    for entry in iter_entries(catalog, args.slugs):
        app_dir = render_app(entry, template_dir, apps_dir, args.force)
        print(f"Created {app_dir.relative_to(ROOT)}")
|
||||
|
||||
|
||||
|
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
set -euo pipefail

# Launch an interactive Cloudron packager container with the repository
# mounted at /workspace and the host Docker socket forwarded.
REPO_ROOT=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)
IMAGE_NAME=${IMAGE_NAME:-knel/cloudron-packager}
BUILD=${BUILD:-0}

# Rebuild the helper image only on explicit request.
if [[ "${BUILD}" == "1" ]]; then
    docker build -t "${IMAGE_NAME}" "${REPO_ROOT}/docker/packager"
fi

docker run --rm -it \
    -v "${REPO_ROOT}:/workspace" \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -e HOME=/home/packager \
    "${IMAGE_NAME}" "$@"
|
Reference in New Issue
Block a user